lang
stringclasses
1 value
license
stringclasses
13 values
stderr
stringlengths
0
350
commit
stringlengths
40
40
returncode
int64
0
128
repos
stringlengths
7
45.1k
new_contents
stringlengths
0
1.87M
new_file
stringlengths
6
292
old_contents
stringlengths
0
1.87M
message
stringlengths
6
9.26k
old_file
stringlengths
6
292
subject
stringlengths
0
4.45k
Java
bsd-3-clause
67adca7c746f522ceec17e2cf4ac8f75c61be94e
0
buckett/raven-java,littleyang/raven-java,reki2000/raven-java6,littleyang/raven-java,galmeida/raven-java,buckett/raven-java,galmeida/raven-java,reki2000/raven-java6
package net.kencochrane.raven.connection; import mockit.Injectable; import mockit.NonStrictExpectations; import mockit.Tested; import mockit.Verifications; import net.kencochrane.raven.Raven; import net.kencochrane.raven.event.Event; import org.mockito.MockitoAnnotations; import org.mockito.Spy; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import java.util.concurrent.locks.ReentrantLock; import static mockit.Deencapsulation.setField; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.verify; public class AbstractConnectionTest { @Injectable private final String publicKey = "9bcf4a8c-f353-4f25-9dda-76a873fff905"; @Injectable private final String secretKey = "56a9d05e-9032-4fdd-8f67-867d526422f9"; @Tested private AbstractConnection abstractConnection; //Spying with mockito as jMockit doesn't support mocks of ReentrantLock @Spy private ReentrantLock reentrantLock = new ReentrantLock(); @BeforeMethod public void setUp() throws Exception { MockitoAnnotations.initMocks(this); setField(Raven.class, "NAME", "Raven-Java/Test"); } @Test public void testAuthHeader() throws Exception { String authHeader = abstractConnection.getAuthHeader(); assertThat(authHeader, is("Sentry sentry_version=5," + "sentry_client=Raven-Java/Test," + "sentry_key=" + publicKey + "," + "sentry_secret=" + secretKey)); } @Test public void testSuccessfulSendCallsDoSend(@Injectable final Event mockEvent) throws Exception { setField(abstractConnection, "lock", reentrantLock); abstractConnection.send(mockEvent); new Verifications() {{ abstractConnection.doSend(mockEvent); }}; } @Test public void testExceptionOnSendStartLockDown(@Injectable final Event mockEvent) throws Exception { setField(abstractConnection, "lock", reentrantLock); new NonStrictExpectations() {{ abstractConnection.doSend((Event) any); result = new ConnectionException(); }}; abstractConnection.send(mockEvent); 
verify(reentrantLock).tryLock(); verify(reentrantLock).unlock(); } }
raven/src/test/java/net/kencochrane/raven/connection/AbstractConnectionTest.java
package net.kencochrane.raven.connection; import mockit.Injectable; import mockit.Mock; import mockit.MockUp; import net.kencochrane.raven.Raven; import net.kencochrane.raven.event.Event; import org.mockito.MockitoAnnotations; import org.mockito.Spy; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import java.io.IOException; import java.util.concurrent.locks.ReentrantLock; import static mockit.Deencapsulation.setField; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.verify; public class AbstractConnectionTest { private final String publicKey = "9bcf4a8c-f353-4f25-9dda-76a873fff905"; private final String secretKey = "56a9d05e-9032-4fdd-8f67-867d526422f9"; private AbstractConnection abstractConnection; //Spying with mockito as jMockit doesn't support mocks of ReentrantLock @Spy private ReentrantLock reentrantLock = new ReentrantLock(); @BeforeMethod public void setUp() throws Exception { MockitoAnnotations.initMocks(this); setField(Raven.class, "NAME", "Raven-Java/Test"); abstractConnection = new DummyAbstractConnection(publicKey, secretKey); setField(abstractConnection, "lock", reentrantLock); } @Test public void testAuthHeader() throws Exception { String authHeader = abstractConnection.getAuthHeader(); assertThat(authHeader, is("Sentry sentry_version=5," + "sentry_client=Raven-Java/Test," + "sentry_key=" + publicKey + "," + "sentry_secret=" + secretKey)); } @Test public void testSuccessfulSendCallsDoSend(@Injectable final Event mockEvent) throws Exception { new MockUp<DummyAbstractConnection>() { @SuppressWarnings("unused") @Mock(invocations = 1) protected void doSend(Event event) throws ConnectionException { } }; abstractConnection.send(mockEvent); } @Test public void testExceptionOnSendStartLockDown(@Injectable final Event mockEvent) throws Exception { new MockUp<DummyAbstractConnection>() { @SuppressWarnings("unused") @Mock protected void doSend(Event 
event) throws ConnectionException { throw new ConnectionException(); } }; abstractConnection.send(mockEvent); verify(reentrantLock).tryLock(); verify(reentrantLock).unlock(); } private static final class DummyAbstractConnection extends AbstractConnection { public DummyAbstractConnection(String publicKey, String secretKey) { super(publicKey, secretKey); } @Override protected void doSend(Event event) throws ConnectionException { } @Override public void close() throws IOException { } } }
Use Tested rather than creating dummy implementations
raven/src/test/java/net/kencochrane/raven/connection/AbstractConnectionTest.java
Use Tested rather than creating dummy implementations
Java
bsd-3-clause
error: pathspec 'pig_scripts/TestFlattenAndFilterCompanies.java' did not match any file(s) known to git
1ad831ec0d618eb375d0a20493e8de20cad28b8d
1
fuzzy-id/midas,fuzzy-id/midas,fuzzy-id/midas
import java.io.File; import java.io.IOException; import org.apache.pig.pigunit.PigTest; import org.apache.pig.tools.parameters.ParseException; import org.junit.Test; public class TestFlattenAndFilterCompanies { private PigTest test; private static final String SCRIPT = "flatten_and_filter_companies.pig"; @Test public void testOnTestData() throws IOException, ParseException { String[] args = { "input=../test_data/crunchbase_companies", "output=flat_and_filtered", "wrapper=env", }; PigTest test = new PigTest(SCRIPT, args); test.assertOutput(new File("../test_data/companies.expected")); } }
pig_scripts/TestFlattenAndFilterCompanies.java
provide tests
pig_scripts/TestFlattenAndFilterCompanies.java
provide tests
Java
bsd-3-clause
error: pathspec 'src/main/java/com/michael_kuck/android/mkcommons/animation/FadingAnimator.java' did not match any file(s) known to git
d9c2d5ec51cd9993f3063359501b90e71976f095
1
mikumi/MKCommons-Android
/* * * * * This file is part of whereisthat-android * * * * Unless otherwise stated in a separate LICENSE file for this project * * or agreed via contract, all rights reserved by the author. * */ package com.michael_kuck.android.mkcommons.animation; import android.animation.Animator; import android.view.View; public class FadingAnimator { public static void fade(final View view, final float endValue, final int durationInMillis, final Runnable completion) { view.animate().alpha(endValue).setDuration(durationInMillis).setListener(new AnimatorCompletionListener() { @Override public void onAnimationEnd(final Animator animation) { if (completion != null) { completion.run(); } } }).start(); } public static void fadeInFromInvisible(final View view, final int durationInMillis, final Runnable completion) { view.setAlpha(0.0f); view.setVisibility(View.VISIBLE); fade(view, 1.0f, durationInMillis, completion); } public static void fadeOutToInvisible(final View view, final int durationInMillis, final Runnable completion) { fade(view, 0.0f, durationInMillis, new Runnable() { @Override public void run() { view.setVisibility(View.INVISIBLE); if (completion != null) { completion.run(); } } }); } public static void fadeInOut(final View view, final int animationDurationInMillis, final int showDurationInMillis, final Runnable completion) { fadeInFromInvisible(view, animationDurationInMillis, new Runnable() { @Override public void run() { view.postDelayed(new Runnable() { @Override public void run() { fadeOutToInvisible(view, animationDurationInMillis, completion); } }, showDurationInMillis); } }); } }
src/main/java/com/michael_kuck/android/mkcommons/animation/FadingAnimator.java
Added convenience animator for fade ins/outs
src/main/java/com/michael_kuck/android/mkcommons/animation/FadingAnimator.java
Added convenience animator for fade ins/outs
Java
mit
d39d1f49a8da64e86185c2c0b9ee237f55bb07ec
0
pedro-ribeiro/react-native-firestack,fullstackreact/react-native-firestack,sraka1/react-native-firestack,sraka1/react-native-firestack,fullstackreact/react-native-firestack,tegument/react-native-firestack,pedro-ribeiro/react-native-firestack,tegument/react-native-firestack,sraka1/react-native-firestack,pedro-ribeiro/react-native-firestack,devshackio/react-native-firestack,devshackio/react-native-firestack,devshackio/react-native-firestack,tegument/react-native-firestack,fullstackreact/react-native-firestack,tegument/react-native-firestack,pedro-ribeiro/react-native-firestack,sraka1/react-native-firestack,devshackio/react-native-firestack,fullstackreact/react-native-firestack
package io.fullstack.firestack; import android.content.Context; import android.util.Log; import java.util.Map; import android.net.Uri; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import com.facebook.react.bridge.Arguments; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactContextBaseJavaModule; import com.facebook.react.bridge.ReactMethod; import com.facebook.react.bridge.Callback; import com.facebook.react.bridge.WritableMap; import com.facebook.react.bridge.ReadableMap; import com.facebook.react.bridge.ReadableNativeMap; import com.facebook.react.modules.core.DeviceEventManagerModule; import com.facebook.react.bridge.ReactContext; import com.google.android.gms.tasks.OnCompleteListener; import com.google.android.gms.tasks.OnFailureListener; import com.google.android.gms.tasks.Task; import com.google.firebase.FirebaseApp; import com.google.firebase.FirebaseOptions; import com.google.firebase.auth.AuthCredential; import com.google.firebase.auth.AuthResult; import com.google.firebase.auth.UserProfileChangeRequest; import com.google.firebase.auth.FacebookAuthProvider; import com.google.firebase.auth.FirebaseAuth; import com.google.firebase.auth.FirebaseUser; import com.google.firebase.auth.GetTokenResult; import com.google.firebase.auth.GoogleAuthProvider; class FirestackAuthModule extends ReactContextBaseJavaModule { private final int NO_CURRENT_USER = 100; private final int ERROR_FETCHING_TOKEN = 101; private static final String TAG = "FirestackAuth"; private Context context; private ReactContext mReactContext; private FirebaseAuth mAuth; private FirebaseApp app; private FirebaseUser user; private FirebaseAuth.AuthStateListener mAuthListener; public FirestackAuthModule(ReactApplicationContext reactContext) { super(reactContext); this.context = reactContext; mReactContext = reactContext; Log.d(TAG, "New FirestackAuth instance"); } @Override public String getName() { return TAG; } 
@ReactMethod public void listenForAuth() { mAuthListener = new FirebaseAuth.AuthStateListener() { @Override public void onAuthStateChanged(@NonNull FirebaseAuth firebaseAuth) { WritableMap msgMap = Arguments.createMap(); msgMap.putString("eventName", "listenForAuth"); if (user != null) { WritableMap userMap = getUserMap(); msgMap.putBoolean("authenticated", true); msgMap.putMap("user", userMap); FirestackUtils.sendEvent(mReactContext, "listenForAuth", msgMap); } else { msgMap.putBoolean("authenticated", false); FirestackUtils.sendEvent(mReactContext, "listenForAuth", msgMap); } } }; mAuth = FirebaseAuth.getInstance(); mAuth.addAuthStateListener(mAuthListener); } @ReactMethod public void unlistenForAuth(final Callback callback) { if (mAuthListener != null) { mAuth.removeAuthStateListener(mAuthListener); WritableMap resp = Arguments.createMap(); resp.putString("status", "complete"); callback.invoke(null, resp); } } @ReactMethod public void createUserWithEmail(final String email, final String password, final Callback onComplete) { mAuth = FirebaseAuth.getInstance(); mAuth.createUserWithEmailAndPassword(email, password) .addOnCompleteListener(new OnCompleteListener<AuthResult>() { @Override public void onComplete(@NonNull Task<AuthResult> task) { if (task.isSuccessful()) { user = task.getResult().getUser(); userCallback(user, onComplete); }else{ userErrorCallback(task, onComplete); } } }); } @ReactMethod public void signInWithEmail(final String email, final String password, final Callback callback) { mAuth = FirebaseAuth.getInstance(); mAuth.signInWithEmailAndPassword(email, password) .addOnCompleteListener(new OnCompleteListener<AuthResult>() { @Override public void onComplete(@NonNull Task<AuthResult> task) { if (task.isSuccessful()) { user = task.getResult().getUser(); userCallback(user, callback); } else { userErrorCallback(task, callback); } } }); } @ReactMethod public void signInWithProvider(final String provider, final String authToken, final String authSecret, 
final Callback callback) { if (provider.equals("facebook")) { this.facebookLogin(authToken,callback); } else // TODO FirestackUtils.todoNote(TAG, "signInWithProvider", callback); } @ReactMethod public void signInWithCustomToken(final String customToken, final Callback callback) { mAuth = FirebaseAuth.getInstance(); mAuth.signInWithCustomToken(customToken) .addOnCompleteListener(new OnCompleteListener<AuthResult>() { @Override public void onComplete(@NonNull Task<AuthResult> task) { Log.d(TAG, "signInWithCustomToken:onComplete:" + task.isSuccessful()); if (task.isSuccessful()) { user = task.getResult().getUser(); userCallback(user, callback); } else { userErrorCallback(task, callback); } } }); } @ReactMethod public void reauthenticateWithCredentialForProvider(final String provider, final String authToken, final String authSecret, final Callback callback) { // TODO: FirestackUtils.todoNote(TAG, "reauthenticateWithCredentialForProvider", callback); // AuthCredential credential; // Log.d(TAG, "reauthenticateWithCredentialForProvider called with: " + provider); } @ReactMethod public void updateUserEmail(final String email, final Callback callback) { FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser(); if (user != null) { user.updateEmail(email) .addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull Task<Void> task) { if (task.isSuccessful()) { Log.d(TAG, "User email address updated"); FirebaseUser u = FirebaseAuth.getInstance().getCurrentUser(); userCallback(u, callback); } else { userErrorCallback(task, callback); } } }); } else { WritableMap err = Arguments.createMap(); err.putInt("errorCode", NO_CURRENT_USER); err.putString("errorMessage", "No current user"); callback.invoke(err); } } @ReactMethod public void updateUserPassword(final String newPassword, final Callback callback) { FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser(); if (user != null) { user.updatePassword(newPassword) 
.addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull Task<Void> task) { if (task.isSuccessful()) { Log.d(TAG, "User password updated"); FirebaseUser u = FirebaseAuth.getInstance().getCurrentUser(); userCallback(u, callback); } else { userErrorCallback(task, callback); } } }); } else { WritableMap err = Arguments.createMap(); err.putInt("errorCode", NO_CURRENT_USER); err.putString("errorMessage", "No current user"); callback.invoke(err); } } @ReactMethod public void sendPasswordResetWithEmail(final String email, final Callback callback) { mAuth = FirebaseAuth.getInstance(); mAuth.sendPasswordResetEmail(email) .addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull Task<Void> task) { if(task.isSuccessful()){ WritableMap resp = Arguments.createMap(); resp.putString("status", "complete"); callback.invoke(null, resp); }else{ callback.invoke(task.getException().toString()); } } }); } @ReactMethod public void deleteUser(final Callback callback) { FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser(); if (user != null) { user.delete() .addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull Task<Void> task) { if (task.isSuccessful()) { Log.d(TAG, "User account deleted"); WritableMap resp = Arguments.createMap(); resp.putString("status", "complete"); resp.putString("msg", "User account deleted"); callback.invoke(null, resp); } else { userErrorCallback(task, callback); } } }); } else { WritableMap err = Arguments.createMap(); err.putInt("errorCode", NO_CURRENT_USER); err.putString("errorMessage", "No current user"); callback.invoke(err); } } @ReactMethod public void getToken(final Callback callback) { FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser(); user.getToken(true) .addOnCompleteListener(new OnCompleteListener<GetTokenResult>() { @Override public void onComplete(@NonNull Task<GetTokenResult> task) { if 
(task.isSuccessful()) { String token = task.getResult().getToken(); WritableMap resp = Arguments.createMap(); resp.putString("status", "complete"); resp.putString("token", token); callback.invoke(null, resp); } else { WritableMap err = Arguments.createMap(); err.putInt("errorCode", ERROR_FETCHING_TOKEN); err.putString("errorMessage", task.getException().getMessage()); callback.invoke(err); } } }); } @ReactMethod public void updateUserProfile(ReadableMap props, final Callback callback) { FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser(); UserProfileChangeRequest.Builder profileBuilder = new UserProfileChangeRequest.Builder(); Map<String, Object> m = FirestackUtils.recursivelyDeconstructReadableMap(props); if (m.containsKey("displayName")) { String displayName = (String) m.get("displayName"); profileBuilder.setDisplayName(displayName); } if (m.containsKey("photoUri")) { String photoUriStr = (String) m.get("photoUri"); Uri uri = Uri.parse(photoUriStr); profileBuilder.setPhotoUri(uri); } UserProfileChangeRequest profileUpdates = profileBuilder.build(); user.updateProfile(profileUpdates) .addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull Task<Void> task) { if (task.isSuccessful()) { Log.d(TAG, "User profile updated"); FirebaseUser u = FirebaseAuth.getInstance().getCurrentUser(); userCallback(u, callback); } else { userErrorCallback(task, callback); } } }); } @ReactMethod public void signOut(final Callback callback) { FirebaseAuth.getInstance().signOut(); WritableMap resp = Arguments.createMap(); resp.putString("status", "complete"); resp.putString("msg", "User signed out"); callback.invoke(null, resp); } @ReactMethod public void getCurrentUser(final Callback callback) { mAuth = FirebaseAuth.getInstance(); user = mAuth.getCurrentUser(); if(user == null){ noUserCallback(callback); }else{ userCallback(user, callback); } } // TODO: Check these things @ReactMethod public void googleLogin(String IdToken, final 
Callback callback) { mAuth = FirebaseAuth.getInstance(); AuthCredential credential = GoogleAuthProvider.getCredential(IdToken, null); mAuth.signInWithCredential(credential) .addOnCompleteListener(new OnCompleteListener<AuthResult>() { @Override public void onComplete(@NonNull Task<AuthResult> task) { if (task.isSuccessful()) { user = task.getResult().getUser(); userCallback(user, callback); }else{ userErrorCallback(task, callback); } } }); } @ReactMethod public void facebookLogin(String Token, final Callback callback) { mAuth = FirebaseAuth.getInstance(); AuthCredential credential = FacebookAuthProvider.getCredential(Token); mAuth.signInWithCredential(credential) .addOnCompleteListener(new OnCompleteListener<AuthResult>() { @Override public void onComplete(@NonNull Task<AuthResult> task) { if (task.isSuccessful()) { user = task.getResult().getUser(); userCallback(user, callback); }else{ userErrorCallback(task, callback); } } }); } // Internal helpers public void userCallback(FirebaseUser passedUser, final Callback onComplete) { WritableMap userMap = getUserMap(); if (passedUser == null) { mAuth = FirebaseAuth.getInstance(); final FirebaseUser user = mAuth.getCurrentUser(); } else { final FirebaseUser user = passedUser; } user.getToken(true).addOnCompleteListener(new OnCompleteListener<GetTokenResult>() { @Override public void onComplete(@NonNull Task<GetTokenResult> task) { WritableMap userMap = Arguments.createMap(); final String token = task.getResult().getToken(); final String email = user.getEmail(); final String uid = user.getUid(); final String provider = user.getProviderId(); userMap.putString("token", token); userMap.putString("email", email); userMap.putString("uid", uid); userMap.putString("provider", provider); onComplete.invoke(null, userMap); } }); } public void noUserCallback(final Callback callback) { WritableMap message = Arguments.createMap(); message.putString("errorMessage", "no_user"); message.putString("eventName", "no_user"); 
message.putBoolean("authenticated", false); callback.invoke(null, message); } public void userErrorCallback(Task task, final Callback onFail) { WritableMap error = Arguments.createMap(); error.putInt("errorCode", task.getException().hashCode()); error.putString("errorMessage", task.getException().getMessage()); error.putString("allErrorMessage", task.getException().toString()); onFail.invoke(error); } private WritableMap getUserMap() { WritableMap userMap = Arguments.createMap(); FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser(); userMap.putString("email", user.getEmail()); userMap.putString("uid", user.getUid()); userMap.putString("provider", user.getProviderId()); return userMap; } }
android/src/main/java/io/fullstack/firestack/FirestackAuth.java
package io.fullstack.firestack; import android.content.Context; import android.util.Log; import java.util.Map; import android.net.Uri; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import com.facebook.react.bridge.Arguments; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactContextBaseJavaModule; import com.facebook.react.bridge.ReactMethod; import com.facebook.react.bridge.Callback; import com.facebook.react.bridge.WritableMap; import com.facebook.react.bridge.ReadableMap; import com.facebook.react.bridge.ReadableNativeMap; import com.facebook.react.modules.core.DeviceEventManagerModule; import com.facebook.react.bridge.ReactContext; import com.google.android.gms.tasks.OnCompleteListener; import com.google.android.gms.tasks.OnFailureListener; import com.google.android.gms.tasks.Task; import com.google.firebase.FirebaseApp; import com.google.firebase.FirebaseOptions; import com.google.firebase.auth.AuthCredential; import com.google.firebase.auth.AuthResult; import com.google.firebase.auth.UserProfileChangeRequest; import com.google.firebase.auth.FacebookAuthProvider; import com.google.firebase.auth.FirebaseAuth; import com.google.firebase.auth.FirebaseUser; import com.google.firebase.auth.GetTokenResult; import com.google.firebase.auth.GoogleAuthProvider; class FirestackAuthModule extends ReactContextBaseJavaModule { private final int NO_CURRENT_USER = 100; private final int ERROR_FETCHING_TOKEN = 101; private static final String TAG = "FirestackAuth"; private Context context; private ReactContext mReactContext; private FirebaseAuth mAuth; private FirebaseApp app; private FirebaseUser user; private FirebaseAuth.AuthStateListener mAuthListener; public FirestackAuthModule(ReactApplicationContext reactContext) { super(reactContext); this.context = reactContext; mReactContext = reactContext; Log.d(TAG, "New FirestackAuth instance"); } @Override public String getName() { return TAG; } 
@ReactMethod public void listenForAuth() { mAuthListener = new FirebaseAuth.AuthStateListener() { @Override public void onAuthStateChanged(@NonNull FirebaseAuth firebaseAuth) { WritableMap msgMap = Arguments.createMap(); msgMap.putString("eventName", "listenForAuth"); if (user != null) { WritableMap userMap = getUserMap(); msgMap.putBoolean("authenticated", true); msgMap.putMap("user", userMap); FirestackUtils.sendEvent(mReactContext, "listenForAuth", msgMap); } else { msgMap.putBoolean("authenticated", false); FirestackUtils.sendEvent(mReactContext, "listenForAuth", msgMap); } } }; mAuth = FirebaseAuth.getInstance(); mAuth.addAuthStateListener(mAuthListener); } @ReactMethod public void unlistenForAuth(final Callback callback) { if (mAuthListener != null) { mAuth.removeAuthStateListener(mAuthListener); WritableMap resp = Arguments.createMap(); resp.putString("status", "complete"); callback.invoke(null, resp); } } @ReactMethod public void createUserWithEmail(final String email, final String password, final Callback onComplete) { mAuth = FirebaseAuth.getInstance(); mAuth.createUserWithEmailAndPassword(email, password) .addOnCompleteListener(new OnCompleteListener<AuthResult>() { @Override public void onComplete(@NonNull Task<AuthResult> task) { if (task.isSuccessful()) { user = task.getResult().getUser(); userCallback(user, onComplete); }else{ userErrorCallback(task, onComplete); } } }); } @ReactMethod public void signInWithEmail(final String email, final String password, final Callback callback) { mAuth = FirebaseAuth.getInstance(); mAuth.signInWithEmailAndPassword(email, password) .addOnCompleteListener(new OnCompleteListener<AuthResult>() { @Override public void onComplete(@NonNull Task<AuthResult> task) { if (task.isSuccessful()) { user = task.getResult().getUser(); userCallback(user, callback); } else { userErrorCallback(task, callback); } } }); } @ReactMethod public void signInWithProvider(final String provider, final String authToken, final String authSecret, 
final Callback callback) { // TODO FirestackUtils.todoNote(TAG, "signInWithProvider", callback); } @ReactMethod public void signInWithCustomToken(final String customToken, final Callback callback) { mAuth = FirebaseAuth.getInstance(); mAuth.signInWithCustomToken(customToken) .addOnCompleteListener(new OnCompleteListener<AuthResult>() { @Override public void onComplete(@NonNull Task<AuthResult> task) { Log.d(TAG, "signInWithCustomToken:onComplete:" + task.isSuccessful()); if (task.isSuccessful()) { user = task.getResult().getUser(); userCallback(user, callback); } else { userErrorCallback(task, callback); } } }); } @ReactMethod public void reauthenticateWithCredentialForProvider(final String provider, final String authToken, final String authSecret, final Callback callback) { // TODO: FirestackUtils.todoNote(TAG, "reauthenticateWithCredentialForProvider", callback); // AuthCredential credential; // Log.d(TAG, "reauthenticateWithCredentialForProvider called with: " + provider); } @ReactMethod public void updateUserEmail(final String email, final Callback callback) { FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser(); if (user != null) { user.updateEmail(email) .addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull Task<Void> task) { if (task.isSuccessful()) { Log.d(TAG, "User email address updated"); FirebaseUser u = FirebaseAuth.getInstance().getCurrentUser(); userCallback(u, callback); } else { userErrorCallback(task, callback); } } }); } else { WritableMap err = Arguments.createMap(); err.putInt("errorCode", NO_CURRENT_USER); err.putString("errorMessage", "No current user"); callback.invoke(err); } } @ReactMethod public void updateUserPassword(final String newPassword, final Callback callback) { FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser(); if (user != null) { user.updatePassword(newPassword) .addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull 
Task<Void> task) { if (task.isSuccessful()) { Log.d(TAG, "User password updated"); FirebaseUser u = FirebaseAuth.getInstance().getCurrentUser(); userCallback(u, callback); } else { userErrorCallback(task, callback); } } }); } else { WritableMap err = Arguments.createMap(); err.putInt("errorCode", NO_CURRENT_USER); err.putString("errorMessage", "No current user"); callback.invoke(err); } } @ReactMethod public void sendPasswordResetWithEmail(final String email, final Callback callback) { mAuth = FirebaseAuth.getInstance(); mAuth.sendPasswordResetEmail(email) .addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull Task<Void> task) { if(task.isSuccessful()){ WritableMap resp = Arguments.createMap(); resp.putString("status", "complete"); callback.invoke(null, resp); }else{ callback.invoke(task.getException().toString()); } } }); } @ReactMethod public void deleteUser(final Callback callback) { FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser(); if (user != null) { user.delete() .addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull Task<Void> task) { if (task.isSuccessful()) { Log.d(TAG, "User account deleted"); WritableMap resp = Arguments.createMap(); resp.putString("status", "complete"); resp.putString("msg", "User account deleted"); callback.invoke(null, resp); } else { userErrorCallback(task, callback); } } }); } else { WritableMap err = Arguments.createMap(); err.putInt("errorCode", NO_CURRENT_USER); err.putString("errorMessage", "No current user"); callback.invoke(err); } } @ReactMethod public void getToken(final Callback callback) { FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser(); user.getToken(true) .addOnCompleteListener(new OnCompleteListener<GetTokenResult>() { @Override public void onComplete(@NonNull Task<GetTokenResult> task) { if (task.isSuccessful()) { String token = task.getResult().getToken(); WritableMap resp = Arguments.createMap(); 
resp.putString("status", "complete"); resp.putString("token", token); callback.invoke(null, resp); } else { WritableMap err = Arguments.createMap(); err.putInt("errorCode", ERROR_FETCHING_TOKEN); err.putString("errorMessage", task.getException().getMessage()); callback.invoke(err); } } }); } @ReactMethod public void updateUserProfile(ReadableMap props, final Callback callback) { FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser(); UserProfileChangeRequest.Builder profileBuilder = new UserProfileChangeRequest.Builder(); Map<String, Object> m = FirestackUtils.recursivelyDeconstructReadableMap(props); if (m.containsKey("displayName")) { String displayName = (String) m.get("displayName"); profileBuilder.setDisplayName(displayName); } if (m.containsKey("photoUri")) { String photoUriStr = (String) m.get("photoUri"); Uri uri = Uri.parse(photoUriStr); profileBuilder.setPhotoUri(uri); } UserProfileChangeRequest profileUpdates = profileBuilder.build(); user.updateProfile(profileUpdates) .addOnCompleteListener(new OnCompleteListener<Void>() { @Override public void onComplete(@NonNull Task<Void> task) { if (task.isSuccessful()) { Log.d(TAG, "User profile updated"); FirebaseUser u = FirebaseAuth.getInstance().getCurrentUser(); userCallback(u, callback); } else { userErrorCallback(task, callback); } } }); } @ReactMethod public void signOut(final Callback callback) { FirebaseAuth.getInstance().signOut(); WritableMap resp = Arguments.createMap(); resp.putString("status", "complete"); resp.putString("msg", "User signed out"); callback.invoke(null, resp); } @ReactMethod public void getCurrentUser(final Callback callback) { mAuth = FirebaseAuth.getInstance(); user = mAuth.getCurrentUser(); if(user == null){ noUserCallback(callback); }else{ userCallback(user, callback); } } // TODO: Check these things @ReactMethod public void googleLogin(String IdToken, final Callback callback) { mAuth = FirebaseAuth.getInstance(); AuthCredential credential = 
GoogleAuthProvider.getCredential(IdToken, null); mAuth.signInWithCredential(credential) .addOnCompleteListener(new OnCompleteListener<AuthResult>() { @Override public void onComplete(@NonNull Task<AuthResult> task) { if (task.isSuccessful()) { user = task.getResult().getUser(); userCallback(user, callback); }else{ userErrorCallback(task, callback); } } }); } @ReactMethod public void facebookLogin(String Token, final Callback callback) { mAuth = FirebaseAuth.getInstance(); AuthCredential credential = FacebookAuthProvider.getCredential(Token); mAuth.signInWithCredential(credential) .addOnCompleteListener(new OnCompleteListener<AuthResult>() { @Override public void onComplete(@NonNull Task<AuthResult> task) { if (task.isSuccessful()) { user = task.getResult().getUser(); userCallback(user, callback); }else{ userErrorCallback(task, callback); } } }); } // Internal helpers public void userCallback(FirebaseUser passedUser, final Callback onComplete) { WritableMap userMap = getUserMap(); if (passedUser == null) { mAuth = FirebaseAuth.getInstance(); final FirebaseUser user = mAuth.getCurrentUser(); } else { final FirebaseUser user = passedUser; } user.getToken(true).addOnCompleteListener(new OnCompleteListener<GetTokenResult>() { @Override public void onComplete(@NonNull Task<GetTokenResult> task) { WritableMap userMap = Arguments.createMap(); final String token = task.getResult().getToken(); final String email = user.getEmail(); final String uid = user.getUid(); final String provider = user.getProviderId(); userMap.putString("token", token); userMap.putString("email", email); userMap.putString("uid", uid); userMap.putString("provider", provider); onComplete.invoke(null, userMap); } }); } public void noUserCallback(final Callback callback) { WritableMap message = Arguments.createMap(); message.putString("errorMessage", "no_user"); message.putString("eventName", "no_user"); message.putBoolean("authenticated", false); callback.invoke(null, message); } public void 
userErrorCallback(Task task, final Callback onFail) { WritableMap error = Arguments.createMap(); error.putInt("errorCode", task.getException().hashCode()); error.putString("errorMessage", task.getException().getMessage()); error.putString("allErrorMessage", task.getException().toString()); onFail.invoke(error); } private WritableMap getUserMap() { WritableMap userMap = Arguments.createMap(); FirebaseUser user = FirebaseAuth.getInstance().getCurrentUser(); userMap.putString("email", user.getEmail()); userMap.putString("uid", user.getUid()); userMap.putString("provider", user.getProviderId()); return userMap; } }
Facebook login fix android
android/src/main/java/io/fullstack/firestack/FirestackAuth.java
Facebook login fix android
Java
mit
d29551906e1c961bf1f0555e34e9d5e57e2c553d
0
Jpoliachik/react-native-navigation,holmesal/react-native-navigation,wix/react-native-navigation,iotize/react-native-navigation,brianjd/react-native-navigation,lakhman/react-native-navigation,brianjd/react-native-navigation,eeynard/react-native-navigation,luggit/react-native-navigation,yusufyildirim/react-native-navigation,yusufyildirim/react-native-navigation,guangmingzizai/react-native-navigation,chicojasl/react-native-navigation,Jpoliachik/react-native-navigation,lkj01010/react-native-navigation,inalist/react-native-navigation,BrendonSled/react-native-navigation,shahen94/react-native-navigation,kristoff-it/react-native-navigation,holmesal/react-native-navigation,brianjd/react-native-navigation,chicojasl/react-native-navigation,BrendonSled/react-native-navigation,thanhzusu/react-native-navigation,3sidedcube/react-native-navigation,guyca/react-native-navigation,InTeach/react-native-navigation,wix/react-native-navigation,varungupta85/react-native-navigation,iotize/react-native-navigation,snapme/react-native-navigation,guyca/react-native-navigation,uni-react/react-native-navigation,MediaMonksMobile/react-native-navigation,Ehesp/react-native-navigation,MattDavies/react-native-navigation,ceyhuno/react-native-navigation,lkj01010/react-native-navigation,MediaMonksMobile/react-native-navigation,okarakose/react-native-navigation,eeynard/react-native-navigation,wix/react-native-navigation,eeynard/react-native-navigation,lakhman/react-native-navigation,brianjd/react-native-navigation,inalist/react-native-navigation,ceyhuno/react-native-navigation,yusufyildirim/react-native-navigation,guangmingzizai/react-native-navigation,ceyhuno/react-native-navigation,thanhzusu/react-native-navigation,MediaMonksMobile/react-native-navigation,kristoff-it/react-native-navigation,okarakose/react-native-navigation,InTeach/react-native-navigation,yusufyildirim/react-native-navigation,luggit/react-native-navigation,coteries/react-native-navigation,kristoff-it/react-native-navigation,lkj01010/reac
t-native-navigation,MattDavies/react-native-navigation,brianjd/react-native-navigation,junedomingo/react-native-navigation,3sidedcube/react-native-navigation,BrendonSled/react-native-navigation,wix/react-native-navigation,chicojasl/react-native-navigation,varungupta85/react-native-navigation,pqkluan/react-native-navigation,Jpoliachik/react-native-navigation,uni-react/react-native-navigation,snapme/react-native-navigation,inalist/react-native-navigation,kristoff-it/react-native-navigation,snapme/react-native-navigation,brianjd/react-native-navigation,uni-react/react-native-navigation,luggit/react-native-navigation,thanhzusu/react-native-navigation,varungupta85/react-native-navigation,guangmingzizai/react-native-navigation,Jpoliachik/react-native-navigation,thanhzusu/react-native-navigation,Ehesp/react-native-navigation,inalist/react-native-navigation,okarakose/react-native-navigation,InTeach/react-native-navigation,guyca/react-native-navigation,shahen94/react-native-navigation,Ehesp/react-native-navigation,pqkluan/react-native-navigation,junedomingo/react-native-navigation,guyca/react-native-navigation,ceyhuno/react-native-navigation,chicojasl/react-native-navigation,Ehesp/react-native-navigation,pqkluan/react-native-navigation,snapme/react-native-navigation,eeynard/react-native-navigation,thanhzusu/react-native-navigation,lakhman/react-native-navigation,holmesal/react-native-navigation,BrendonSled/react-native-navigation,MattDavies/react-native-navigation,guangmingzizai/react-native-navigation,thanhzusu/react-native-navigation,MattDavies/react-native-navigation,chicojasl/react-native-navigation,coteries/react-native-navigation,iotize/react-native-navigation,lakhman/react-native-navigation,Jpoliachik/react-native-navigation,junedomingo/react-native-navigation,MediaMonksMobile/react-native-navigation,shahen94/react-native-navigation,varungupta85/react-native-navigation,iotize/react-native-navigation,coteries/react-native-navigation,wix/react-native-navigation,holmesal
/react-native-navigation,coteries/react-native-navigation,shahen94/react-native-navigation,luggit/react-native-navigation,InTeach/react-native-navigation,junedomingo/react-native-navigation,Jpoliachik/react-native-navigation,wix/react-native-navigation,pqkluan/react-native-navigation,chicojasl/react-native-navigation,3sidedcube/react-native-navigation,3sidedcube/react-native-navigation,uni-react/react-native-navigation,ceyhuno/react-native-navigation,ceyhuno/react-native-navigation,okarakose/react-native-navigation
package com.reactnativenavigation.controllers; import android.graphics.Color; import android.os.Bundle; import android.support.annotation.LayoutRes; import android.support.annotation.Nullable; import android.support.v7.app.AppCompatActivity; import android.view.View; import com.reactnativenavigation.NavigationApplication; public abstract class SplashActivity extends AppCompatActivity { @Override protected void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); setSplashLayout(); if (NavigationApplication.instance.isReactContextInitialized()) { finish(); } else { NavigationApplication.instance.startReactContext(); } } @Override protected void onPause() { super.onPause(); finish(); } private void setSplashLayout() { final int splashLayout = getSplashLayout(); if (splashLayout > 0) { setContentView(splashLayout); } else { setContentView(createSplashLayout()); } } /** * @return xml layout res id */ @LayoutRes public int getSplashLayout() { return 0; } /** * @return the layout you would like to show while react's js context loads */ public View createSplashLayout() { View view = new View(this); view.setBackgroundColor(Color.WHITE); return view; } }
android/app/src/main/java/com/reactnativenavigation/controllers/SplashActivity.java
package com.reactnativenavigation.controllers; import android.graphics.Color; import android.os.Bundle; import android.support.annotation.LayoutRes; import android.support.annotation.Nullable; import android.support.v7.app.AppCompatActivity; import android.view.View; import com.reactnativenavigation.NavigationApplication; public abstract class SplashActivity extends AppCompatActivity { @Override protected void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); setSplashLayout(); NavigationApplication.instance.startReactContext(); } @Override protected void onPause() { super.onPause(); finish(); } private void setSplashLayout() { final int splashLayout = getSplashLayout(); if (splashLayout > 0) { setContentView(splashLayout); } else { setContentView(createSplashLayout()); } } /** * @return xml layout res id */ @LayoutRes public int getSplashLayout() { return 0; } /** * @return the layout you would like to show while react's js context loads */ public View createSplashLayout() { View view = new View(this); view.setBackgroundColor(Color.WHITE); return view; } }
finish the splash if reactContext already running
android/app/src/main/java/com/reactnativenavigation/controllers/SplashActivity.java
finish the splash if reactContext already running
Java
mit
error: pathspec 'PowXN.java' did not match any file(s) known to git
4f9a4fb97c69451d837ae42619fdfa2507975ea5
1
AllyW/jvCode
/* Implement pow(x, n). */ import java.io.*; import java.util.*; public class PowXN { public static double myPow(double x, int n) { double res=1; while(n!=0) { if(n%2==0) { x=x*x; n/=2; } else { if(n>0) { res*=x; n--; } else { res/=x; n++; } } } return res; } public static void main(String[] args) { Scanner reader = new Scanner(System.in); System.out.println("Enter x: "); double x = reader.nextDouble(); System.out.println("Enter exp n: "); int n = reader.nextInt(); System.out.println("Result: " + myPow(x,n)); return; } }
PowXN.java
power of numbers - double x, half exp
PowXN.java
power of numbers - double x, half exp
Java
mit
error: pathspec 'SolutionDay5.java' did not match any file(s) known to git
04467b1bedbaf71d37d81d7ead017cc042bb6622
1
KapilRijhwani/HackerRank-30DaysOfCode,KapilRijhwani/HackerRank-30DaysOfCode
import java.util.Scanner; public class Solution { public static void main(String[] args) { Scanner in = new Scanner(System.in); int N = in.nextInt(); in.close(); for (int i = 1; i < 11; i++) { System.out.println(N + " x " + i + " = " + N * i); } } }
SolutionDay5.java
Day 5: Loops
SolutionDay5.java
Day 5: Loops
Java
mit
error: pathspec 'lab-13/Oppg_1.java' did not match any file(s) known to git
898943d1c8e31b8766c77802093a2eb7d4e3ad26
1
huxflux/lab
/* * GrafikkEksempelOv1JOGL.java */ import java.awt.*; // klassene Color og Graphics import javax.swing.*; // klassene JFrame og JPanel import java.util.*; import javax.media.opengl.*; //JOGL klasser import javax.media.opengl.glu.*; //glu klasser class Vindu extends JFrame { public Vindu(String tittel) { setTitle(tittel); setDefaultCloseOperation(EXIT_ON_CLOSE); TegningOv1_1JOGL tegningen = new TegningOv1_1JOGL(800, 640); add(tegningen); pack(); } } /*Klassen som inneholder main*/ class Oppg_1{ public static void main(String[] args) { Vindu etVindu = new Vindu("V2005 Oving 1: Enkel grafikk"); etVindu.setVisible(true); } } class TegningOv1_1JOGL extends JPanel implements GLEventListener{ private GLCanvas canvas; private float angle; private GLU glu = new GLU(); public TegningOv1_1JOGL(int width, int hight) { super(); GLCapabilities capabilities = new GLCapabilities(); capabilities.setHardwareAccelerated(true); //We want hardware acceleration capabilities.setDoubleBuffered(true); //And double buffering canvas = new GLCanvas(capabilities); canvas.addGLEventListener(this); this.add(canvas); this.setSize(width,hight); canvas.setSize(width,hight); //We want the JPanel and the GLCanvas to have the same size canvas.setVisible(true); //This is somehow necessary } public void init(GLAutoDrawable glDrawable) { GL gl = glDrawable.getGL(); //Get the GL object from glDrawable gl.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // Sets the background color to white gl.glMatrixMode(GL.GL_PROJECTION); // Select The Projection Matrix gl.glLoadIdentity(); // Reset the view matrix to the identity matrix glu.gluPerspective(60.0,1.0 , 1.0 ,9.0); // Spesifize the projection matrix (fov, w/h, near plane, far plane) gl.glMatrixMode(GL.GL_MODELVIEW); } public void reshape(GLAutoDrawable glDrawable, int i, int i1, int i2, int i3) { // Has to be implementet due to the GLEventListener interface } public void display(GLAutoDrawable glDrawable){ GL gl = glDrawable.getGL(); drawGLScene(glDrawable); 
glDrawable.swapBuffers(); gl.glFlush(); } public void displayChanged(GLAutoDrawable drawable, boolean modeChanged, boolean deviceChanged){ } public void drawGLScene(GLAutoDrawable glDrawable) { GL gl = glDrawable.getGL(); gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT); // and The Depth Buffer gl.glLoadIdentity(); gl.glTranslatef(-3.0f,0.0f,-8.0f); gl.glColor3f(1.0f,0.0f,0.0f); gl.glBegin(GL.GL_TRIANGLES); gl.glVertex3f(0.0f, 1.0f, 0.0f); gl.glVertex3f(-1.0f, -1.0f, 0.0f); gl.glVertex3f(1.0f,-1.0f,0.0f); gl.glEnd(); gl.glTranslatef(3.0f, 0.0f, 0.0f); gl.glColor3f(0.0f, 1.0f, 0.0f); gl.glBegin(GL.GL_TRIANGLES); gl.glVertex3f(-1.0f, -1.0f, 0.0f); gl.glVertex3f(-1.0f, 1.0f, 0.0f); gl.glVertex3f(1.0f, -1.0f, 0.0f); gl.glEnd(); gl.glBegin(GL.GL_TRIANGLES); gl.glVertex3f(-1.0f, 1.0f, 0.0f); gl.glVertex3f(1.0f, 1.0f, 0.0f); gl.glVertex3f(1.0f, -1.0f, 0.0f); gl.glEnd(); gl.glColor3f(0.0f, 0.0f, 1.0f); gl.glTranslatef(3.0f, 0.0f, 0.0f); gl.glBegin(GL.GL_LINE_LOOP); for(int i = 0; i < 360; i++){ double radianer = i*(Math.PI/180); gl.glVertex2f((float) Math.cos(radianer),(float) Math.sin(radianer)); } gl.glEnd(); } }
lab-13/Oppg_1.java
jatta
lab-13/Oppg_1.java
jatta
Java
mit
error: pathspec 'Java/Method/Fluent.java' did not match any file(s) known to git
6627850bcf356a8aabb3fa984330145e3cb3bea2
1
maniero/SOpt,maniero/SOpt,bigown/SOpt,bigown/SOpt,bigown/SOpt,bigown/SOpt,maniero/SOpt,bigown/SOpt,bigown/SOpt,maniero/SOpt,maniero/SOpt,maniero/SOpt,bigown/SOpt,maniero/SOpt,maniero/SOpt,maniero/SOpt,bigown/SOpt,maniero/SOpt,maniero/SOpt,maniero/SOpt,maniero/SOpt,bigown/SOpt,bigown/SOpt,maniero/SOpt,maniero/SOpt,maniero/SOpt
class Main { public static void main(String args []) { Texto t = new Texto("a b c d e"); t = t.adiciona(new Frase("x y")).adiciona(" f g h i"); System.out.println(t); } } class Frase { String frase; Frase(String frase) { this.frase = frase; } } class Texto { String t; Texto(String t) { this.t = t; } Texto adiciona(Frase fra) { t = t.concat(fra.frase); return this; } Texto adiciona(String s) { t = t.concat(s); return this; } public String toString() { return t; } } //https://pt.stackoverflow.com/q/542775/101
Java/Method/Fluent.java
https://pt.stackoverflow.com/q/542775/101
Java/Method/Fluent.java
https://pt.stackoverflow.com/q/542775/101
Java
mit
error: pathspec 'Java/NumberComplement.java' did not match any file(s) known to git
e89ddc7ea87143beff40b4c91b4d379ffd1b44c5
1
Vonzpf/LeetCode,Vonzpf/LeetCode
package LeetCode.Java; /** * Created by tank on 17/3/23. */ public class NumberComplement { public int findComplement(int num) { String binary = Integer.toBinaryString(num); int numberComplement = (int)((long)Math.pow(2, binary.length()) - 1 - num); return numberComplement; } }
Java/NumberComplement.java
476.Number Complement
Java/NumberComplement.java
476.Number Complement
Java
mit
error: pathspec 'src/Java/Q107020/Recur3.java' did not match any file(s) known to git
b8b88c8bf56b1e8fbc7b200a56621f2a4d40f13c
1
umyuu/Sample,umyuu/Sample,umyuu/Sample,umyuu/Sample
import java.util.Arrays; import java.util.Scanner; import java.util.stream.Collectors; import java.util.stream.Stream; public class Recur3 { static void recur3(int n) { if (n <= 0) { throw new UnsupportedOperationException(String.valueOf(n)); } // new int[100]で宣言する意味はないです。 int[] nstk = new int[n]; int[] sstk = new int[n]; int ptr = -1; int sw = 0; while (true) { if (n > 0) { ptr++; nstk[ptr] = n; sstk[ptr] = sw; if (sw == 0) n -= 1;// n = n - 1; else if (sw == 1) { n -= 2; sw = 0; } continue; } do { n = nstk[ptr]; sw = sstk[ptr] + 1; // 動作確認のために変数ptrと配列の内容をデバックプリント System.out.println(Stream.generate(() -> "-").limit(30).collect(Collectors.joining(""))); System.out.println(ptr); System.out.println(Arrays.toString(nstk) + ' ' + Arrays.toString(sstk)); ptr--; if (sw == 2) { System.out.println(n); if (ptr < 0) return; } } while (sw == 2); } } public static void main(String[] args) { try (Scanner sc = new Scanner(System.in)) { System.out.print("整数を入力せよ:"); int x = Integer.parseInt(sc.nextLine()); recur3(x); } } }
src/Java/Q107020/Recur3.java
Q107020
src/Java/Q107020/Recur3.java
Q107020
Java
mit
error: pathspec 'CECProject/src/cec/model/Search.java' did not match any file(s) known to git
fc7ff55877b7ada4bff03c63b6722c8810bb0b8b
1
amishwins/project-cec-2013,amishwins/project-cec-2013
package cec.model; import java.util.regex.Pattern; import java.util.regex.Matcher; public class Search { String source; String patternToFind; Search(String source, String patternToFind) { this.patternToFind = patternToFind; this.source = source; } private boolean isMatch() { Pattern pattern = Pattern.compile(patternToFind); Matcher matcher = pattern.matcher(source); return matcher.find(); } public boolean isEmailStringMatch() { return isMatch(); } }
CECProject/src/cec/model/Search.java
Search Feature
CECProject/src/cec/model/Search.java
Search Feature
Java
mit
error: pathspec 'codingbat/Warmup-1/parrotTrouble.java' did not match any file(s) known to git
ff35c63e8359ea0ed388af1a1aad7b9e9cd6a84e
1
controversial/APCS
public boolean parrotTrouble(boolean talking, int hour) { return (hour < 7 || hour > 20) && talking; }
codingbat/Warmup-1/parrotTrouble.java
Create parrotTrouble.java
codingbat/Warmup-1/parrotTrouble.java
Create parrotTrouble.java
Java
mit
error: pathspec 'src/gameengine/engine/GameBuilder.java' did not match any file(s) known to git
d147105f62f016ff1415c134fa44424d864d0ecf
1
amiyajima/voogasalad_VOOGirlsGeneration,mzhu22/TurnBasedStrategy
package gameengine.engine; import gamedata.gamecomponents.Level; import gamedata.gamecomponents.Patch; import gamedata.gamecomponents.Piece; import gameengine.player.Player; import java.util.List; import java.util.Map; public class GameBuilder { private Parser myParser; private List<Player> myPlayers; private List<Level> myLevels; private List<Patch> myPatches; private List<Piece> myPieces; public void getJSONFile(){ } /** * fills in myPlayers, myLevels, myPatches, and myPieces using a map, which contains * all the components of the chosen game, parsed by a Parser */ public void initiateComponents(Map<String, String> components){ } }
src/gameengine/engine/GameBuilder.java
created GameBuilder
src/gameengine/engine/GameBuilder.java
created GameBuilder
Java
mit
error: pathspec 'com/cinnamon/system/DestroyEventHandler.java' did not match any file(s) known to git
0163f5c8637761548d69bdc49e263f3a9f55c814
1
joltix/Cinnamon
package com.cinnamon.system; /** * Wrapper for an {@link EventHandler} of type {@link DestroyEvent}. */ public interface DestroyEventHandler extends EventHandler<DestroyEvent> { }
com/cinnamon/system/DestroyEventHandler.java
Wrapper for an EventHandler<DestroyEvent>
com/cinnamon/system/DestroyEventHandler.java
Wrapper for an EventHandler<DestroyEvent>
Java
mit
error: pathspec 'src/main/java/seedu/ezdo/logic/commands/SaveCommand.java' did not match any file(s) known to git
471645888bbed8e5e745eb7f725a75e39e73dd54
1
CS2103JAN2017-W14-B4/main,CS2103JAN2017-W14-B4/main
package seedu.ezdo.logic.commands; import java.io.File; import seedu.ezdo.commons.exceptions.IllegalValueException; import seedu.ezdo.logic.commands.exceptions.CommandException; /** * Changes the save location of ezDo. */ public class SaveCommand extends Command { public static final String COMMAND_WORD = "save"; public static final String MESSAGE_USAGE = COMMAND_WORD + ": Changes the save location of ezDo. " + "Parameters: DIRECTORYPATH \n" + "Example: " + COMMAND_WORD + " C:\\Users\\Tom\\Desktop"; public static final String MESSAGE_DIRECTORY_PATH_INVALID = "The directory path given is invalid."; public static final String MESSAGE_SAVE_TASK_SUCCESS = "New Save Location: %1$s"; private final String directoryPath; /** * Creates a SaveCommand using raw values. * * @throws IllegalValueException if the directory path is invalid */ public SaveCommand(String path) throws IllegalValueException { directoryPath = path; File directory = new File(path); if (directory.exists() == false && !directory.isDirectory()) { // THIS PART SEEMS TO NOT WORK VERY WELL. C:\\// works. TODO System.out.println("file.exists has error and not directory"); throw new IllegalValueException(MESSAGE_DIRECTORY_PATH_INVALID); } } // TODO @Override public CommandResult execute() throws CommandException { assert directoryPath != null; return new CommandResult(String.format(MESSAGE_SAVE_TASK_SUCCESS, directoryPath)); } }
src/main/java/seedu/ezdo/logic/commands/SaveCommand.java
Create SaveCommand.java
src/main/java/seedu/ezdo/logic/commands/SaveCommand.java
Create SaveCommand.java
Java
mit
error: pathspec 'src/main/java/com/leetcode/design/InsertDeleteGetRandom.java' did not match any file(s) known to git
813a3c51feb1933b6a14b7ec301f738568abc4bb
1
ramswaroop/Algorithms-and-Data-Structures-in-Java
package com.leetcode.design;

import java.util.*;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Level: Medium
 * Link: https://leetcode.com/problems/insert-delete-getrandom-o1/
 * Description:
 * Design a data structure that supports all following operations in average O(1) time.
 *
 * insert(val): Inserts an item val to the set if not already present.
 * remove(val): Removes an item val from the set if present.
 * getRandom: Returns a random element from current set of elements. Each element must have
 * the same probability of being returned.
 *
 * Runtime: <a href="https://leetcode.com/submissions/detail/250682053/">52 ms</a>.
 *
 * @author rampatra
 * @since 2019-08-11
 */
public class InsertDeleteGetRandom {

    // Maps each value to its index in `values`, which makes remove O(1).
    private final Map<Integer, Integer> valuesToIndexMap;
    // Backing list gives O(1) uniform random access for getRandom.
    private final List<Integer> values;
    private final Random random;

    InsertDeleteGetRandom() {
        valuesToIndexMap = new HashMap<>();
        values = new ArrayList<>();
        random = new Random();
    }

    /**
     * Adds {@code val} if absent.
     *
     * @return {@code true} if the set changed as a result of the call
     */
    boolean insert(int val) {
        if (valuesToIndexMap.containsKey(val)) {
            return false;
        }
        valuesToIndexMap.put(val, values.size());
        values.add(val);
        return true;
    }

    /**
     * Removes {@code val} if present. The removed slot is filled by moving the
     * last list element into it, so the list removal is always O(1).
     *
     * @return {@code true} if the set changed as a result of the call
     */
    boolean remove(int val) {
        Integer index = valuesToIndexMap.get(val);
        if (index == null) {
            return false;
        }
        int lastIndex = values.size() - 1;
        if (index != lastIndex) {
            int lastValue = values.get(lastIndex);
            values.set(index, lastValue);           // overwrite the removed slot
            valuesToIndexMap.put(lastValue, index); // keep the moved value's index current
        }
        values.remove(lastIndex);
        return valuesToIndexMap.remove(val, index);
    }

    /**
     * Returns a uniformly random element. The set must be non-empty
     * (per the problem statement, getRandom is only called when it is).
     */
    int getRandom() {
        return values.get(random.nextInt(values.size()));
    }

    public static void main(String[] args) {
        InsertDeleteGetRandom randomizedSet = new InsertDeleteGetRandom();
        assertTrue(randomizedSet.insert(2));
        assertTrue(randomizedSet.insert(-1));
        assertFalse(randomizedSet.remove(-10));
        assertTrue(randomizedSet.remove(-1));
        assertEquals(2, randomizedSet.getRandom());
        assertTrue(randomizedSet.remove(2));
        assertFalse(randomizedSet.remove(-2));
        assertFalse(randomizedSet.remove(-20));
        assertFalse(randomizedSet.remove(-30));
        assertFalse(randomizedSet.remove(2));
        assertFalse(randomizedSet.remove(1));
        assertFalse(randomizedSet.remove(0));
    }
}
src/main/java/com/leetcode/design/InsertDeleteGetRandom.java
Insert Delete GetRandom O(1): done
src/main/java/com/leetcode/design/InsertDeleteGetRandom.java
Insert Delete GetRandom O(1): done
Java
mit
error: pathspec 'src/main/java/cz/sparko/javagrandejmh/v2/section2/CryptBench.java' did not match any file(s) known to git
7a4cefc943ef1663ddf821f5defce456a56bb2bf
1
sparkoo/Java-Grande-JMH
/**************************************************************************
 *                                                                        *
 *            Java Grande Forum Benchmark Suite - Version 2.0             *
 *                                                                        *
 *                            produced by                                 *
 *                                                                        *
 *                  Java Grande Benchmarking Project                      *
 *                                                                        *
 *                                at                                      *
 *                                                                        *
 *                Edinburgh Parallel Computing Centre                     *
 *                                                                        *
 *                email: [email protected]                             *
 *                                                                        *
 *      This version copyright (c) The University of Edinburgh, 1999.     *
 *                         All rights reserved.                           *
 *                                                                        *
 **************************************************************************/
package cz.sparko.javagrandejmh.v2.section2;

import org.junit.Assert;
import org.openjdk.jmh.annotations.*;

import java.util.Random;

/**
 * JMH port of the Java Grande section-2 "Crypt" kernel: IDEA encryption of a
 * byte buffer followed by decryption. After the run, {@link #validate()}
 * checks that the round-tripped plaintext matches the original.
 */
@State(Scope.Thread)
public class CryptBench {

    // Plaintext size in bytes; JMH runs the benchmark once per @Param value.
    @Param({"3000000", "20000000", "50000000"})
    private int size;

    private byte[] plain1;   // Buffer for plaintext data.
    private byte[] crypt1;   // Buffer for encrypted data.
    private byte[] plain2;   // Buffer for decrypted data.
    private short[] userkey; // Key for encryption/decryption.
    private int[] Z;         // Encryption subkey (userkey derived).
    private int[] DK;        // Decryption subkey (userkey derived).

    /** Allocates the buffers and derives both IDEA subkey schedules. */
    @Setup
    public void setUp() {
        buildTestData();
    }

    /** Fails the run if decryption did not reproduce the original plaintext. */
    @TearDown
    public void validate() {
        boolean error;
        for (int i = 0; i < size; i++) {
            error = (plain1[i] != plain2[i]);
            if (error) {
                System.out.println("Validation failed");
                System.out.println("Original Byte " + i + " = " + plain1[i]);
                System.out.println("Encrypted Byte " + i + " = " + crypt1[i]);
                System.out.println("Decrypted Byte " + i + " = " + plain2[i]);
                Assert.fail();
            }
        }
    }

    /** Measured kernel: one full encrypt + decrypt round trip. */
    @Benchmark
    public void kernel() {
        cipher_idea(plain1, crypt1, Z);  // Encrypt plain1.
        cipher_idea(crypt1, plain2, DK); // Decrypt.
    }

    /** Fills the plaintext buffer and derives a random 128-bit user key. */
    private void buildTestData() {
        // Create three byte arrays that will be used (and reused) for
        // encryption/decryption operations.
        plain1 = new byte[size];
        crypt1 = new byte[size];
        plain2 = new byte[size];

        // Fixed seed keeps every benchmark invocation deterministic.
        Random rndnum = new Random(136506717L); // Create random number generator.

        // Allocate three arrays to hold keys: userkey is the 128-bit key.
        // Z is the set of 16-bit encryption subkeys derived from userkey,
        // while DK is the set of 16-bit decryption subkeys also derived
        // from userkey. NOTE: The 16-bit values are stored here in
        // 32-bit int arrays so that the values may be used in calculations
        // as if they are unsigned. Each 64-bit block of plaintext goes
        // through eight processing rounds involving six of the subkeys
        // then a final output transform with four of the keys; (8 * 6)
        // + 4 = 52 subkeys.
        userkey = new short[8]; // User key has 8 16-bit shorts.
        Z = new int[52];        // Encryption subkey (user key derived).
        DK = new int[52];       // Decryption subkey (user key derived).

        // Generate user key randomly; eight 16-bit values in an array.
        for (int i = 0; i < 8; i++) {
            // Again, the random number function returns int. Converting
            // to a short type preserves the bit pattern in the lower 16
            // bits of the int and discards the rest.
            userkey[i] = (short) rndnum.nextInt();
        }

        // Compute encryption and decryption subkeys.
        calcEncryptKey();
        calcDecryptKey();

        // Fill plain1 with "text."
        for (int i = 0; i < size; i++) {
            plain1[i] = (byte) i; // Converting to a byte
            // type preserves the bit pattern in the lower 8 bits of the
            // int and discards the rest.
        }
    }

    /**
     * Builds the 52 16-bit encryption subkeys Z[] from the user key and
     * stores in 32-bit int array. The routine corrects an error in the
     * source code in the Schneier book. Basically, the sense of the 7-
     * and 9-bit shifts are reversed. It still works reversed, but the
     * encrypted output would not decrypt with someone else's IDEA code.
     */
    private void calcEncryptKey() {
        int j; // Utility variable.

        for (int i = 0; i < 52; i++) {
            // Zero out the 52-int Z array.
            Z[i] = 0;
        }

        for (int i = 0; i < 8; i++) {
            // First 8 subkeys are userkey itself.
            Z[i] = userkey[i] & 0xffff; // Convert "unsigned"
            // short to int.
        }

        // Each set of 8 subkeys thereafter is derived from left rotating
        // the whole 128-bit key 25 bits to left (once between each set of
        // eight keys and then before the last four). Instead of actually
        // rotating the whole key, this routine just grabs the 16 bits
        // that are 25 bits to the right of the corresponding subkey
        // eight positions below the current subkey. That 16-bit extent
        // straddles two array members, so bits are shifted left in one
        // member and right (with zero fill) in the other. For the last
        // two subkeys in any group of eight, those 16 bits start to
        // wrap around to the first two members of the previous eight.
        for (int i = 8; i < 52; i++) {
            j = i % 8;
            if (j < 6) {
                Z[i] = ((Z[i - 7] >>> 9) | (Z[i - 6] << 7)) // Shift and combine.
                        & 0xFFFF; // Just 16 bits.
                continue; // Next iteration.
            }
            if (j == 6) {
                // Wrap to beginning for second chunk.
                Z[i] = ((Z[i - 7] >>> 9) | (Z[i - 14] << 7)) & 0xFFFF;
                continue;
            }
            // j == 7 so wrap to beginning for both chunks.
            Z[i] = ((Z[i - 15] >>> 9) | (Z[i - 14] << 7)) & 0xFFFF;
        }
    }

    /**
     * Builds the 52 16-bit decryption subkeys DK[] from the encryption
     * subkeys Z[]. DK[] is a 32-bit int array holding 16-bit values as
     * unsigned.
     */
    private void calcDecryptKey() {
        int j, k;       // Index counters.
        int t1, t2, t3; // Temps to hold decrypt subkeys.

        t1 = inv(Z[0]);      // Multiplicative inverse (mod x10001).
        t2 = -Z[1] & 0xffff; // Additive inverse, 2nd encrypt subkey.
        t3 = -Z[2] & 0xffff; // Additive inverse, 3rd encrypt subkey.

        DK[51] = inv(Z[3]); // Multiplicative inverse (mod x10001).
        DK[50] = t3;
        DK[49] = t2;
        DK[48] = t1;

        j = 47; // Indices into temp and encrypt arrays.
        k = 4;
        for (int i = 0; i < 7; i++) {
            t1 = Z[k++];
            DK[j--] = Z[k++];
            DK[j--] = t1;
            t1 = inv(Z[k++]);
            t2 = -Z[k++] & 0xffff;
            t3 = -Z[k++] & 0xffff;
            DK[j--] = inv(Z[k++]);
            DK[j--] = t2;
            DK[j--] = t3;
            DK[j--] = t1;
        }

        // Final (ninth) group: note t3/t2 are stored in swapped order
        // relative to the loop body above.
        t1 = Z[k++];
        DK[j--] = Z[k++];
        DK[j--] = t1;
        t1 = inv(Z[k++]);
        t2 = -Z[k++] & 0xffff;
        t3 = -Z[k++] & 0xffff;
        DK[j--] = inv(Z[k++]);
        DK[j--] = t3;
        DK[j--] = t2;
        DK[j--] = t1;
    }

    /**
     * IDEA encryption/decryption algorithm. It processes plaintext in
     * 64-bit blocks, one at a time, breaking the block into four 16-bit
     * unsigned subblocks. It goes through eight rounds of processing
     * using 6 new subkeys each time, plus four for last step. The source
     * text is in array text1, the destination text goes into array text2
     * The routine represents 16-bit subblocks and subkeys as type int so
     * that they can be treated more easily as unsigned. Multiplication
     * modulo 0x10001 interprets a zero sub-block as 0x10000; it must to
     * fit in 16 bits.
     */
    private void cipher_idea(byte[] text1, byte[] text2, int[] key) {
        int i1 = 0; // Index into first text array.
        int i2 = 0; // Index into second text array.
        int ik;     // Index into key array.
        int x1, x2, x3, x4, t1, t2; // Four "16-bit" blocks, two temps.
        int r; // Eight rounds of processing.

        for (int i = 0; i < text1.length; i += 8) {
            ik = 0; // Restart key index.
            r = 8;  // Eight rounds of processing.

            // Load eight plain1 bytes as four 16-bit "unsigned" integers.
            // Masking with 0xff prevents sign extension with cast to int.
            x1 = text1[i1++] & 0xff;         // Build 16-bit x1 from 2 bytes,
            x1 |= (text1[i1++] & 0xff) << 8; // assuming low-order byte first.
            x2 = text1[i1++] & 0xff;
            x2 |= (text1[i1++] & 0xff) << 8;
            x3 = text1[i1++] & 0xff;
            x3 |= (text1[i1++] & 0xff) << 8;
            x4 = text1[i1++] & 0xff;
            x4 |= (text1[i1++] & 0xff) << 8;

            do {
                // 1) Multiply (modulo 0x10001), 1st text sub-block
                // with 1st key sub-block.
                x1 = (int) ((long) x1 * key[ik++] % 0x10001L & 0xffff);
                // 2) Add (modulo 0x10000), 2nd text sub-block
                // with 2nd key sub-block.
                x2 = x2 + key[ik++] & 0xffff;
                // 3) Add (modulo 0x10000), 3rd text sub-block
                // with 3rd key sub-block.
                x3 = x3 + key[ik++] & 0xffff;
                // 4) Multiply (modulo 0x10001), 4th text sub-block
                // with 4th key sub-block.
                x4 = (int) ((long) x4 * key[ik++] % 0x10001L & 0xffff);
                // 5) XOR results from steps 1 and 3.
                t2 = x1 ^ x3;
                // 6) XOR results from steps 2 and 4.
                // Included in step 8.
                // 7) Multiply (modulo 0x10001), result of step 5
                // with 5th key sub-block.
                t2 = (int) ((long) t2 * key[ik++] % 0x10001L & 0xffff);
                // 8) Add (modulo 0x10000), results of steps 6 and 7.
                t1 = t2 + (x2 ^ x4) & 0xffff;
                // 9) Multiply (modulo 0x10001), result of step 8
                // with 6th key sub-block.
                t1 = (int) ((long) t1 * key[ik++] % 0x10001L & 0xffff);
                // 10) Add (modulo 0x10000), results of steps 7 and 9.
                t2 = t1 + t2 & 0xffff;
                // 11) XOR results from steps 1 and 9.
                x1 ^= t1;
                // 14) XOR results from steps 4 and 10. (Out of order).
                x4 ^= t2;
                // 13) XOR results from steps 2 and 10. (Out of order).
                t2 ^= x2;
                // 12) XOR results from steps 3 and 9. (Out of order).
                x2 = x3 ^ t1;
                x3 = t2; // Results of x2 and x3 now swapped.
            } while (--r != 0); // Repeats seven more rounds.

            // Final output transform (4 steps).
            // 1) Multiply (modulo 0x10001), 1st text-block
            // with 1st key sub-block.
            x1 = (int) ((long) x1 * key[ik++] % 0x10001L & 0xffff);
            // 2) Add (modulo 0x10000), 2nd text sub-block
            // with 2nd key sub-block. It says x3, but that is to undo swap
            // of subblocks 2 and 3 in 8th processing round.
            x3 = x3 + key[ik++] & 0xffff;
            // 3) Add (modulo 0x10000), 3rd text sub-block
            // with 3rd key sub-block. It says x2, but that is to undo swap
            // of subblocks 2 and 3 in 8th processing round.
            x2 = x2 + key[ik++] & 0xffff;
            // 4) Multiply (modulo 0x10001), 4th text-block
            // with 4th key sub-block.
            x4 = (int) ((long) x4 * key[ik++] % 0x10001L & 0xffff);

            // Repackage from 16-bit sub-blocks to 8-bit byte array text2.
            text2[i2++] = (byte) x1;
            text2[i2++] = (byte) (x1 >>> 8);
            text2[i2++] = (byte) x3;         // x3 and x2 are switched
            text2[i2++] = (byte) (x3 >>> 8); // only in name.
            text2[i2++] = (byte) x2;
            text2[i2++] = (byte) (x2 >>> 8);
            text2[i2++] = (byte) x4;
            text2[i2++] = (byte) (x4 >>> 8);
        }
    }

    /**
     * Compute multiplicative inverse of x, modulo (2**16)+1 using
     * extended Euclid's GCD (greatest common divisor) algorithm.
     * It is unrolled twice to avoid swapping the meaning of
     * the registers. And some subtracts are changed to adds.
     * Java: Though it uses signed 32-bit ints, the interpretation
     * of the bits within is strictly unsigned 16-bit.
     */
    private int inv(int x) {
        int t0, t1;
        int q, y;

        if (x <= 1) {       // Assumes positive x.
            return (x);     // 0 and 1 are self-inverse.
        }
        t1 = 0x10001 / x;   // (2**16+1)/x; x is >= 2, so fits 16 bits.
        y = 0x10001 % x;
        if (y == 1) {
            return ((1 - t1) & 0xFFFF);
        }
        t0 = 1;
        do {
            q = x / y;
            x = x % y;
            t0 += q * t1;
            if (x == 1) {
                return (t0);
            }
            q = y / x;
            y = y % x;
            t1 += q * t0;
        } while (y != 1);
        return ((1 - t1) & 0xFFFF);
    }
}
src/main/java/cz/sparko/javagrandejmh/v2/section2/CryptBench.java
section2 CryptBench
src/main/java/cz/sparko/javagrandejmh/v2/section2/CryptBench.java
section2 CryptBench
Java
mit
error: pathspec 'src/cn/simastudio/charkey/codinginterview/MinNumberInRotateArray.java' did not match any file(s) known to git
641e68a929c3c425e8aa1299c19c3d1917c6567a
1
CharkeyQK/AlgorithmDataStructure
/*
 * Copyright (c) 2013-2015 Charkey. All rights reserved.
 *
 * This software is the confidential and proprietary information of Charkey.
 * You shall not disclose such Confidential Information and shall use it only
 * in accordance with the terms of the agreements you entered into with Charkey.
 *
 * Charkey MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE SUITABILITY OF THE SOFTWARE,
 * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT.
 *
 * Charkey SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING,
 * MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS DERIVATIVES.
 */
package cn.simastudio.charkey.codinginterview;

/**
 * Created by Qikai on 2016/8/5.
 *
 * Finds the minimum element of a rotated non-decreasing array
 * (e.g. {3, 4, 5, 1, 2} is a rotation of {1, 2, 3, 4, 5}).
 */
public class MinNumberInRotateArray {

    /**
     * Binary search on the rotated array, comparing the midpoint with the
     * right end:
     * - mid greater than right end: the minimum lies strictly right of mid;
     * - mid smaller than right end: the minimum is at mid or to its left;
     * - equal: duplicates make the side undecidable, so shrink by one.
     *
     * @param array a rotation of a non-decreasing array; must be non-empty
     * @return the minimum element of {@code array}
     * @throws IllegalArgumentException if {@code array} is null or empty
     *         (previously this surfaced as an ArrayIndexOutOfBoundsException)
     */
    public int minNumberInRotateArray(int[] array) {
        if (array == null || array.length == 0) {
            throw new IllegalArgumentException("array must be non-null and non-empty");
        }
        int start = 0;
        int end = array.length - 1;
        while (start < end) {
            int index = start + (end - start) / 2; // overflow-safe midpoint
            if (array[index] > array[end]) {
                start = index + 1;
            } else if (array[index] < array[end]) {
                end = index;
            } else {
                end = end - 1; // equal values: drop one duplicate from the right
            }
        }
        return array[start];
    }

    /**
     * Demo driver. Builds a large, duplicate-heavy rotated array
     * programmatically — replacing the original multi-thousand-element
     * hardcoded literal, which exercised the same duplicate-heavy case —
     * and prints its minimum (1, as the original data also printed).
     */
    public static void main(String[] args) {
        int n = 10000;
        int pivot = 3777; // rotation offset
        int[] array = new int[n];
        for (int i = 0; i < n; i++) {
            // Non-decreasing sequence 1,1,1,2,2,2,... rotated left by `pivot`.
            array[i] = 1 + ((i + pivot) % n) / 3;
        }
        System.out.println(new MinNumberInRotateArray().minNumberInRotateArray(array));
    }
}
src/cn/simastudio/charkey/codinginterview/MinNumberInRotateArray.java
minNumberInRotateArray
src/cn/simastudio/charkey/codinginterview/MinNumberInRotateArray.java
minNumberInRotateArray
Java
mit
error: pathspec 'src/main/java/com/github/aureliano/achmed/helper/EasterEggHelper.java' did not match any file(s) known to git
1b46c43931ac2f31ffd052782476e45951be4945
1
aureliano/achmed
package com.github.aureliano.achmed.helper;

import java.util.Calendar;

/**
 * Easter-egg helper that produces a time-of-day dependent greeting.
 */
public final class EasterEggHelper {

    private EasterEggHelper() {
        throw new InstantiationError(this.getClass().getName() + " cannot be instantiated.");
    }

    /**
     * Returns a greeting based on the current wall-clock time:
     * 05:00-11:59 morning, 12:00-17:59 afternoon, 18:00-20:30 evening,
     * otherwise night.
     *
     * @return the greeting string
     */
    public static String greeting() {
        Calendar calendar = Calendar.getInstance();
        int hour = calendar.get(Calendar.HOUR_OF_DAY);
        String greetingMessage = null;

        if ((hour >= 5) && (hour < 12)) {
            greetingMessage = "morning";
        } else if ((hour >= 12) && (hour < 18)) {
            greetingMessage = "afternoon";
        } else if (((hour >= 18) && (hour < 20))
                || ((hour == 20) && (calendar.get(Calendar.MINUTE) <= 30))) {
            // Evening ends at 20:30. The original condition used `hour < 21`,
            // which made the 20:30 minute check dead code and stretched
            // evening to 20:59.
            greetingMessage = "evening";
        } else {
            greetingMessage = "night";
        }

        return "Good " + greetingMessage + "... Infidel!";
    }
}
src/main/java/com/github/aureliano/achmed/helper/EasterEggHelper.java
Create easter egg helper with greeting method.
src/main/java/com/github/aureliano/achmed/helper/EasterEggHelper.java
Create easter egg helper with greeting method.
Java
mit
error: pathspec 'framework/core/src/main/java/edu/tamu/framework/config/CoreScheduleConfig.java' did not match any file(s) known to git
de39c7b8881532f1808572a8b6e189570f7e86a0
1
TAMULib/Weaver-Webservice-Core
/*
 * ScheduleConfig.java
 *
 * Version:
 *     $Id$
 *
 * Revisions:
 *     $Log$
 */
package edu.tamu.framework.config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.SchedulingConfigurer;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import org.springframework.scheduling.config.ScheduledTaskRegistrar;

/**
 * Core schedule configuration. Enables Spring's scheduled-task support and
 * registers a {@link ThreadPoolTaskScheduler} bean with the task registrar
 * so scheduled tasks run on that pool.
 *
 * @author
 */
@Configuration
@EnableScheduling
public class CoreScheduleConfig implements SchedulingConfigurer {

    /**
     * Thread pool task scheduler bean, created with Spring's defaults.
     *
     * @return ThreadPoolTaskScheduler
     */
    @Bean()
    public ThreadPoolTaskScheduler taskScheduler() {
        return new ThreadPoolTaskScheduler();
    }

    /**
     * Configure task registrar to use the scheduler bean above.
     *
     * @param taskRegistrar ScheduledTaskRegistrar
     */
    @Override
    public void configureTasks(ScheduledTaskRegistrar taskRegistrar) {
        taskRegistrar.setTaskScheduler(taskScheduler());
    }
}
framework/core/src/main/java/edu/tamu/framework/config/CoreScheduleConfig.java
Added schedule config.
framework/core/src/main/java/edu/tamu/framework/config/CoreScheduleConfig.java
Added schedule config.
Java
mit
error: pathspec '1-algorithm/13-leetcode/java/src/advanced/scan/twopointers/lc240_searcha2dmatrix2/Solution.java' did not match any file(s) known to git
bb99405b5c5b3669a336ee4a203b4efadbeff51c
1
cdai/interview
package advanced.scan.twopointers.lc240_searcha2dmatrix2;

import java.util.Arrays;

/**
 * Write an efficient algorithm that searches for a value in an m x n matrix.
 * This matrix has the following properties:
 * Integers in each row are sorted in ascending from left to right.
 * Integers in each column are sorted in ascending from top to bottom.
 * For example, consider the following matrix:
 * [
 *  [1, 4, 7, 11, 15],
 *  [2, 5, 8, 12, 19],
 *  [3, 6, 9, 16, 22],
 *  [10, 13, 14, 17, 24],
 *  [18, 21, 23, 26, 30]
 * ]
 * Given target = 5, return true.
 * Given target = 20, return false.
 */
public class Solution {

    public static void main(String[] args) {
        // Prints "false": 1 is not in {{-1, 3}}. The previous version printed
        // "true" because of the binarySearch result check fixed below.
        System.out.println(new Solution().searchMatrix(new int[][]{{-1, 3}}, 1));
    }

    /**
     * Row-wise binary search.
     *
     * @param matrix m x n matrix with ascending rows and ascending columns
     * @param target value to look for
     * @return true if {@code target} occurs in {@code matrix}
     */
    public boolean searchMatrix(int[][] matrix, int target) {
        int n = matrix.length;
        if (n == 0 || matrix[0].length == 0) {
            return false;
        }
        int m = matrix[0].length;
        for (int i = 0; i < n; i++) {
            if (matrix[i][0] > target) {
                // Columns ascend too, so this row and every later row only
                // contain values greater than target.
                break;
            }
            if (matrix[i][m - 1] < target) {
                continue; // entire row is smaller than target
            }
            // Fix: Arrays.binarySearch signals "not found" with
            // -(insertionPoint) - 1, which is only -1 when the insertion point
            // is 0; the original `!= -1` test reported false positives.
            if (Arrays.binarySearch(matrix[i], target) >= 0) {
                return true;
            }
        }
        return false;
    }
}
1-algorithm/13-leetcode/java/src/advanced/scan/twopointers/lc240_searcha2dmatrix2/Solution.java
leetcode-240 search a 2D matrix 2
1-algorithm/13-leetcode/java/src/advanced/scan/twopointers/lc240_searcha2dmatrix2/Solution.java
leetcode-240 search a 2D matrix 2
Java
cc0-1.0
error: pathspec 'sort/binaryinsertionSort.java' did not match any file(s) known to git
1449d9718e8365c54c226c10d678817404e880d7
1
ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms
import java.util.Arrays; class GFG { public static void main(String[] args) { final int[] arr = {37, 23, 0, 17, 12, 72, 31, 46, 100, 88, 54 }; new GFG().sort(arr); for(int i=0; i<arr.length; i++) System.out.print(arr[i]+" "); } public void sort(int array[]) { for (int i = 1; i < array.length; i++) { int x = array[i]; // Find location to insert using binary search int j = Math.abs(Arrays.binarySearch(array, 0, i, x) + 1); //Shifting array to one location right System.arraycopy(array, j, array, j+1, i-j); //Placing element at its correct location array[j] = x; } } }
sort/binaryinsertionSort.java
binary insertion sort in java
sort/binaryinsertionSort.java
binary insertion sort in java
Java
epl-1.0
error: pathspec 'gbp-renderer/gbp-renderer-impl/src/test/java/org/opendaylight/nic/gbp/renderer/impl/GBPTenantPolicyCreatorTest.java' did not match any file(s) known to git
fd0ff32e0388a1ed6fdee966da372a065d3f3593
1
opendaylight/nic
/* * Copyright (c) 2015 NEC Corporation * All rights reserved. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 which accompanies this * distribution, and is available at http://www.eclipse.org/legal/epl-v10.html */ package org.opendaylight.nic.gbp.renderer.impl; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.util.ArrayList; import java.util.List; import com.google.common.base.Optional; import com.google.common.util.concurrent.CheckedFuture; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.powermock.api.mockito.PowerMockito; import org.powermock.modules.junit4.PowerMockRunner; import org.powermock.reflect.Whitebox; import org.opendaylight.controller.md.sal.binding.api.DataBroker; import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction; import org.opendaylight.controller.md.sal.binding.api.WriteTransaction; import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType; import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException; import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException; import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev100924.IpAddress; import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev100924.Ipv4Address; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.common.rev140421.EndpointGroupId; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.common.rev140421.L2BridgeDomainId; import 
org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.common.rev140421.L2FloodDomainId; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.common.rev140421.L3ContextId; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.common.rev140421.NetworkDomainId; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.common.rev140421.TenantId; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.endpoint.rev140421.endpoint.fields.L3Address; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.endpoint.rev140421.endpoint.fields.L3AddressBuilder; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.endpoint.rev140421.Endpoints; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.endpoint.rev140421.EndpointsBuilder; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.endpoint.rev140421.endpoints.Endpoint; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.endpoint.rev140421.endpoints.EndpointBuilder; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.policy.rev140421.tenants.TenantBuilder; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.policy.rev140421.tenants.tenant.Contract; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.policy.rev140421.tenants.tenant.EndpointGroup; import org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.policy.rev140421.tenants.tenant.Subnet; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.Actions; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.ActionsBuilder; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.actions.action.Allow; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.actions.action.AllowBuilder; import 
org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.actions.action.Block; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.actions.action.BlockBuilder; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.Subjects; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.SubjectsBuilder; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.subjects.Subject; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.subjects.subject.EndPointGroupSelector; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.subjects.subject.EndPointSelector; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.subjects.subject.end.point.group.EndPointGroup; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.subjects.subject.end.point.group.EndPointGroupBuilder; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intents.Intent; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intents.IntentBuilder; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intents.IntentKey; import org.opendaylight.yang.gen.v1.urn.opendaylight.intent.types.rev150122.Uuid; import org.opendaylight.yangtools.yang.binding.InstanceIdentifier; /** * JUnit test for {@link GBPTenantPolicyCreator}. * * GBPTenantPolicyCreator test class is to test Group based Policy tenant policy * creator takes an intent and converts it to a tenant policy which then gets * pushed into the config datastore for the groupbasedpolicy rendering to the * appropriate network devices. */ @RunWith(PowerMockRunner.class) public class GBPTenantPolicyCreatorTest { /** * Mock object for DataBroker. */ @Mock private DataBroker mockBroker; /** * Mock object for Intent. */ @Mock private Intent mockIntent; /** * Mock object for ReadOnlyTransaction. 
*/ @Mock private ReadOnlyTransaction mockReadTransaction; /** * Mock object for WriteTransaction. */ @Mock private WriteTransaction mockWriteTransaction; /** * Mock object for Optional. */ @Mock private Optional<Endpoints> mockOptionalDataObject; /** * Mock object for CheckedFuture. */ @Mock private CheckedFuture<Optional<Endpoints>, ReadFailedException> mockFuture; /** * Mock object for CheckedFuture. */ @Mock private CheckedFuture<Void, TransactionCommitFailedException> mockCheckedFuture; /** * String declaration for UniqueId. */ private static final String UNIQUE_ID = "891fc7a8-cca7-45ee-9128-3294b96307d0"; /** * String declaration for expected Subject. */ private static final String EXPECT_SUBJECT = "s1"; /** * String declaration for expected Consumer. */ private static final String EXPECT_CONSUMER = "cns1"; /** * String declaration for expected Provider. */ private static final String EXPECT_PROVIDER = "pns1"; /** * String declaration for IP Address. */ private static final String TEST_IP = "192.168.196.3"; /** * String declaration for actual object. */ private static String actual; /** * Intent object reference. */ private Intent intent; /** * IntentKey object reference. */ private IntentKey intentKey; /** * Allow object reference. */ private Allow allow; /** * Block object reference. */ private Block block; /** * Actions object reference for Allow action. */ private Actions actionAllow; /** * Actions object reference for Block action. */ private Actions actionBlock; /** * Subjects object reference for source. */ private Subjects srcsubject; /** * Subjects object reference for destination. */ private Subjects destsubject; /** * Declare list of Actions. */ final List<Actions> actionlist = new ArrayList<Actions>(); /** * Declare list of subjects. */ private List<Subjects> subjectslist; /** * Declare list of L3Address. */ private List<L3Address> l3List; /** * GBPTenantPolicyCreator object to perform unit testing. 
*/ private GBPTenantPolicyCreator gbpTenantPolicyCreator; /** * This method creates the required objects to perform unit testing. */ @Before public void setUp() throws Exception { intentKey = new IntentKey(new Uuid(UNIQUE_ID)); allow = new AllowBuilder().build(); block = new BlockBuilder().build(); actionAllow = new ActionsBuilder().setAction(allow).build(); actionBlock = new ActionsBuilder().setAction(block).build(); actionlist.add(actionAllow); EndPointGroup src = new EndPointGroupBuilder().setName(UNIQUE_ID) .build(); EndPointGroup dest = new EndPointGroupBuilder().setName(UNIQUE_ID) .build(); Subject srcAddress = new org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.subjects.subject.EndPointGroupBuilder() .setEndPointGroup(src).build(); Subject destAddress = new org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.subjects.subject.EndPointGroupBuilder() .setEndPointGroup(dest).build(); srcsubject = new SubjectsBuilder().setSubject(srcAddress).build(); destsubject = new SubjectsBuilder().setSubject(destAddress).build(); subjectslist = new ArrayList<Subjects>(); subjectslist.add(srcsubject); subjectslist.add(destsubject); intent = new IntentBuilder().setKey(intentKey) .setActions(actionlist).setSubjects(subjectslist).build(); gbpTenantPolicyCreator = new GBPTenantPolicyCreator(mockBroker, intent); gbpTenantPolicyCreator = PowerMockito.spy(gbpTenantPolicyCreator); EndpointGroupId endpointGrpSrc = new EndpointGroupId(UNIQUE_ID); TenantId tenantId = new TenantId(UNIQUE_ID); L2BridgeDomainId l2bdId = new L2BridgeDomainId(UNIQUE_ID); Ipv4Address TEST_IPV4 = new Ipv4Address(TEST_IP); L3ContextId l3cntxtId = new L3ContextId(UNIQUE_ID); L3Address l3Address = new L3AddressBuilder().setL3Context(l3cntxtId) .setIpAddress(new IpAddress(TEST_IPV4)).build(); l3List = new ArrayList<L3Address>(); l3List.add(l3Address); Endpoint endpointSrc = new EndpointBuilder() .setEndpointGroup(endpointGrpSrc).setTenant(tenantId) 
.setL2Context(l2bdId).setL3Address(l3List).build(); final List<Endpoint> endPointList = new ArrayList<Endpoint>(); endPointList.add(endpointSrc); Endpoints endPoints = new EndpointsBuilder().setEndpoint(endPointList) .build(); InstanceIdentifier<Endpoints> instanceIdentifier = InstanceIdentifier .builder(Endpoints.class).build(); when(mockBroker.newReadOnlyTransaction()).thenReturn( mockReadTransaction); when( mockReadTransaction.read(LogicalDatastoreType.OPERATIONAL, instanceIdentifier)).thenReturn(mockFuture); when(mockFuture.checkedGet()).thenReturn(mockOptionalDataObject); when(mockOptionalDataObject.isPresent()).thenReturn(true); when(mockOptionalDataObject.get()).thenReturn(endPoints); when(mockBroker.newWriteOnlyTransaction()).thenReturn( mockWriteTransaction); when(mockWriteTransaction.submit()).thenReturn(mockCheckedFuture); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()} * . * * Verifying the Group based Policy tenant policy creator takes an intent * and converts it to a tenant policy which then * gets pushed into the config datastore for the groupbasedpolicy rendering * to the appropriate network devices. * */ @Test public void testProcessIntentToGBP() throws Exception { gbpTenantPolicyCreator.processIntentToGBP(); verify(mockBroker, times(2)).newReadOnlyTransaction(); verify(mockBroker).newWriteOnlyTransaction(); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()} * . * * Verifying the Group based Policy tenant policy creator takes an intent * with action type of ALLOW and converts it to a tenant policy which then * gets pushed into the config datastore for the groupbasedpolicy rendering * to the appropriate network devices. * */ @Test public void testGetAllowSubject() throws Exception { /** * PowerMockito.verifyPrivate() is verifying the getAllowSubject method * in GBPTenantPolicyCreator. 
*/ PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "getAllowSubject"); org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.policy.rev140421.tenants.tenant.contract.Subject actual = Whitebox .invokeMethod(gbpTenantPolicyCreator, "getAllowSubject"); assertEquals(EXPECT_SUBJECT, actual.getName().getValue()); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()} * . * * Verifying the Group based Policy tenant policy creator takes an intent * with action type of BLOCK and converts it to a tenant policy which then * gets pushed into the config datastore for the groupbasedpolicy rendering * to the appropriate network devices. * */ @Test public void testGetBlockSubject() throws Exception { actionlist.set(0, actionBlock); Intent intent = new IntentBuilder().setKey(intentKey) .setActions(actionlist).setSubjects(subjectslist).build(); gbpTenantPolicyCreator = new GBPTenantPolicyCreator(mockBroker, intent); gbpTenantPolicyCreator.processIntentToGBP(); gbpTenantPolicyCreator = PowerMockito.spy(gbpTenantPolicyCreator); /** * PowerMockito.verifyPrivate() is verifying the getBlockSubject method * in GBPTenantPolicyCreator. */ PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "getBlockSubject"); org.opendaylight.yang.gen.v1.urn.opendaylight.groupbasedpolicy.policy.rev140421.tenants.tenant.contract.Subject actual = Whitebox .invokeMethod(gbpTenantPolicyCreator, "getBlockSubject"); assertEquals(EXPECT_SUBJECT, actual.getName().getValue()); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()} * . * * Verifying Tenant is created or not. */ @Test public void testGetTenant() throws Exception { /** * PowerMockito.verifyPrivate() is verifying the getTenant method in * GBPTenantPolicyCreator. 
*/ TenantBuilder builder = Whitebox.invokeMethod(gbpTenantPolicyCreator, "getTenant"); PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "getTenant"); final String actual_consumer = builder.getEndpointGroup().get(0).getConsumerNamedSelector().get(0).getName().getValue(); final String actual_Provider = builder.getEndpointGroup().get(1).getProviderNamedSelector().get(0).getName().getValue(); assertEquals(EXPECT_CONSUMER, actual_consumer); assertEquals(EXPECT_PROVIDER, actual_Provider); assertEquals(UNIQUE_ID, builder.getId().getValue()); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()} * . * * Verifying getDefaultContract method is invoked or not in * GbpTenantPolicyCreator. */ @Test public void testgetDefaultContract() throws Exception { /** * PowerMockito.verifyPrivate() is verifying the getDefaultContract * method in GBPTenantPolicyCreator. */ Contract mockContract = Whitebox.invokeMethod(gbpTenantPolicyCreator, "getDefaultContract"); PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "getDefaultContract"); assertEquals(EXPECT_SUBJECT, mockContract.getSubject().get(0).getName() .getValue()); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()} * . * * Verifying getEndpointIdentifier method is invoked or not in * GbpTenantPolicyCreator. */ @Test public void testGetEndpointIdentifier() throws Exception { /** * PowerMockito.verifyPrivate() is verifying the getEndpointIdentifier * method in GBPTenantPolicyCreator. */ actual = Whitebox.invokeMethod(gbpTenantPolicyCreator, "getEndpointIdentifier", srcsubject); PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "getEndpointIdentifier", srcsubject); assertEquals(UNIQUE_ID, actual); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()}. 
* * Verifying getEndpointIdentifier method is invoked or not if subject is EndPointGroup in. * GbpTenantPolicyCreator. */ @Test public void testGetEndpointIdentifierForEndPointGroup() throws Exception { /** * PowerMockito.verifyPrivate() is verifying the getEndpointIdentifier * method in GBPTenantPolicyCreator. */ actual = Whitebox.invokeMethod(gbpTenantPolicyCreator, "getEndpointIdentifier", srcsubject); PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "getEndpointIdentifier", srcsubject); assertEquals(UNIQUE_ID, actual); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()}. * * Verifying getEndpointIdentifier method is invoked or not if subject is EndPointSelector in * GbpTenantPolicyCreator. */ @Test public void testGetEndpointIdentifierForEndPointSelector() throws Exception { org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.subjects.subject.end.point.selector.EndPointSelector selector = mock(org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.subjects.subject.end.point.selector.EndPointSelector.class); EndPointSelector EndPointSelector = mock(EndPointSelector.class); when(selector.getEndPointSelector()).thenReturn(UNIQUE_ID); when(EndPointSelector.getEndPointSelector()).thenReturn(selector); org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.Subjects subject = mock(org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.Subjects.class); when(subject.getSubject()).thenReturn(EndPointSelector); /** * PowerMockito.verifyPrivate() is verifying the getEndpointIdentifier * method in GBPTenantPolicyCreator. 
*/ final String actual = Whitebox.invokeMethod(gbpTenantPolicyCreator, "getEndpointIdentifier", subject); PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "getEndpointIdentifier", subject); assertEquals(UNIQUE_ID, actual); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()}. * * Verifying getEndpointIdentifier method is invoked or not if subject is EndPointGroupSelector in * GbpTenantPolicyCreator. */ @Test public void testGetEndpointIdentifierForEndPointGroupSelector() throws Exception { org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.subjects.subject.end.point.group.selector.EndPointGroupSelector grpSelector = mock(org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.subjects.subject.end.point.group.selector.EndPointGroupSelector.class); EndPointGroupSelector EndPointGroupSelector = mock(EndPointGroupSelector.class); when(grpSelector.getEndPointGroupSelector()).thenReturn(UNIQUE_ID); when(EndPointGroupSelector.getEndPointGroupSelector()).thenReturn(grpSelector); org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.Subjects subject = mock(org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.Subjects.class); when(subject.getSubject()).thenReturn(EndPointGroupSelector); /** * PowerMockito.verifyPrivate() is verifying the getEndpointIdentifier * method in GBPTenantPolicyCreator. */ PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "getEndpointIdentifier", subject); actual = Whitebox.invokeMethod(gbpTenantPolicyCreator, "getEndpointIdentifier", subject); assertEquals(UNIQUE_ID, actual); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()} * . * * Verifying getEndpointIdentifier method is invoked or not in * GbpTenantPolicyCreator. 
*/ @Test public void testGetEndpointIdentifierNull() throws Exception { org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.Subjects subject = mock(org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.Subjects.class); /** * PowerMockito.verifyPrivate() is verifying the getEndpointIdentifier * method in GBPTenantPolicyCreator. */ actual = Whitebox.invokeMethod(gbpTenantPolicyCreator, "getEndpointIdentifier", subject); PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "getEndpointIdentifier", subject); assertEquals("", actual); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()} * . * * Verifying createSubNet method is invoked or not in * GbpTenantPolicyCreator and checks IP Address. */ @Test public void testCreateSubnet() throws Exception { final String expectedIp = "192.168.196.1"; final String expectedIpPrefix = "192.168.196.1/24"; final NetworkDomainId networkDomainId = new NetworkDomainId(UNIQUE_ID); final L2FloodDomainId floodDomainId = new L2FloodDomainId(UNIQUE_ID); /** * PowerMockito.verifyPrivate() is verifying the createSubnet method in * GBPTenantPolicyCreator. */ Subnet mockSubnet = Whitebox.invokeMethod(gbpTenantPolicyCreator, "createSubnet", l3List, networkDomainId, floodDomainId); PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "createSubnet", l3List, networkDomainId, floodDomainId); assertEquals(expectedIp, mockSubnet.getVirtualRouterIp() .getIpv4Address().getValue()); assertEquals(expectedIpPrefix, mockSubnet.getIpPrefix().getIpv4Prefix().getValue()); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()}. * * Verifying the list of endpoints that matches an intent subject id or not */ @Test public void testReadEPNodes() throws Exception { /** * PowerMockito.verifyPrivate() is verifying the readEPNodes method in * GBPTenantPolicyCreator. 
*/ List<Endpoint> endPointList = Whitebox.invokeMethod(gbpTenantPolicyCreator, "readEPNodes", UNIQUE_ID); PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "readEPNodes", UNIQUE_ID); assertEquals(UNIQUE_ID, endPointList.get(0).getTenant().getValue()); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()} * . * * verifying the intent having id,actions and subjects or not. * */ @Test public void testVerifyIntent() throws Exception { /** * PowerMockito.verifyPrivate() is verifying the verifyIntent method in * GBPTenantPolicyCreator. */ boolean actual = Whitebox.invokeMethod(gbpTenantPolicyCreator, "verifyIntent"); PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "verifyIntent"); assertTrue(actual); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()}. * * verifying the intent having id or not. * */ @Test public void testVerifyIntentId() throws Exception { GBPTenantPolicyCreator gbpTPCId = new GBPTenantPolicyCreator( mockBroker, mockIntent); gbpTPCId.processIntentToGBP(); /** * PowerMockito.verifyPrivate() is verifying the verifyIntent method in * GBPTenantPolicyCreator. */ gbpTenantPolicyCreator = PowerMockito.spy(gbpTPCId); PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "verifyIntent"); boolean actual = Whitebox.invokeMethod(gbpTPCId, "verifyIntent"); assertFalse(actual); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()}. * * verifying the intent having actions or not. 
* */ @Test public void testVerifyIntentAction() throws Exception { when(mockIntent.getId()).thenReturn(mock(Uuid.class)); when(mockIntent.getActions()).thenReturn(null); GBPTenantPolicyCreator gbpTPCAction = new GBPTenantPolicyCreator( mockBroker, mockIntent); gbpTenantPolicyCreator = PowerMockito.spy(gbpTPCAction); boolean actual = Whitebox.invokeMethod(gbpTenantPolicyCreator, "verifyIntent"); PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "verifyIntent"); assertFalse(actual); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()}. * * verifying the intent having subjects or not. * */ @Test public void testVerifyIntentSubject() throws Exception { when(mockIntent.getId()).thenReturn(mock(Uuid.class)); when(mockIntent.getActions()).thenReturn(actionlist); when(mockIntent.getSubjects()).thenReturn(null); GBPTenantPolicyCreator gbpTPCSubject = new GBPTenantPolicyCreator( mockBroker, mockIntent); gbpTenantPolicyCreator = PowerMockito.spy(gbpTPCSubject); PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "verifyIntent"); boolean actual = Whitebox.invokeMethod(gbpTenantPolicyCreator, "verifyIntent"); assertFalse(actual); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()} * . * * verify the matching end points available or not for a particular Subject * ID. 
*/ @Test public void testGetTenantEndpointAttributesFail() throws Exception { EndPointGroup src = new EndPointGroupBuilder().setName( "d2d86574-2d97-419e-a7e2-e1042249629c").build(); EndPointGroup dest = new EndPointGroupBuilder().setName( "d2d86574-2d97-419e-a7e2-e1042249629c").build(); Subject srcAddress = new org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.subjects.subject.EndPointGroupBuilder() .setEndPointGroup(src).build(); Subject destAddress = new org.opendaylight.yang.gen.v1.urn.opendaylight.intent.rev150122.intent.subjects.subject.EndPointGroupBuilder() .setEndPointGroup(dest).build(); srcsubject = new SubjectsBuilder().setSubject(srcAddress).build(); destsubject = new SubjectsBuilder().setSubject(destAddress).build(); subjectslist = new ArrayList<Subjects>(); subjectslist.add(srcsubject); subjectslist.add(destsubject); Intent intent = new IntentBuilder().setKey(intentKey) .setActions(actionlist).setSubjects(subjectslist).build(); gbpTenantPolicyCreator = new GBPTenantPolicyCreator(mockBroker, intent); gbpTenantPolicyCreator.processIntentToGBP(); gbpTenantPolicyCreator = PowerMockito.spy(gbpTenantPolicyCreator); /** * PowerMockito.verifyPrivate() is verifying the * getTenantEndpointAttributes method in GBPTenantPolicyCreator. */ PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "getTenantEndpointAttributes", "", true); /** * when subject Id is null then method should return flase. */ boolean actual = Whitebox.invokeMethod(gbpTenantPolicyCreator,"getTenantEndpointAttributes", "", true); assertFalse(actual); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()} * . * * verify the matching end points available or not for a particular Subject * ID. */ @Test public void testGetTenantEndpointAttributes() throws Exception { /** * PowerMockito.verifyPrivate() is verifying the * getTenantEndpointAttributes method in GBPTenantPolicyCreator. 
*/ PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "getTenantEndpointAttributes", UNIQUE_ID, true); /** * when subject Id is valid then method should return true. */ boolean actual = Whitebox.invokeMethod(gbpTenantPolicyCreator, "getTenantEndpointAttributes", UNIQUE_ID, true); assertTrue(actual); } /** * Test method for * {@link org.opendaylight.nic.gbp.renderer.impl.GBPTenantPolicyCreator#processIntentToGBP()} * . * * Verifying createEndpointGroups method is invoked or not in * GbpTenantPolicyCreator. */ @Test public void testcreateEndpointGroups() throws Exception { /** * PowerMockito.verifyPrivate() is verifying the * getTenantEndpointAttributes method in GBPTenantPolicyCreator. */ List<EndpointGroup> endpointGroups = Whitebox.invokeMethod(gbpTenantPolicyCreator, "createEndpointGroups"); PowerMockito.verifyPrivate(gbpTenantPolicyCreator).invoke( "createEndpointGroups"); assertEquals(UNIQUE_ID, endpointGroups.get(0).getId().getValue()); } }
gbp-renderer/gbp-renderer-impl/src/test/java/org/opendaylight/nic/gbp/renderer/impl/GBPTenantPolicyCreatorTest.java
UnitTest implementation added for GBPTenantPolicyCreator in GBP-Renderer-Impl module. Change-Id: I10a95027a4c3de62286a482d3130415b7b14a937 Signed-off-by: hari.pr <[email protected]>
gbp-renderer/gbp-renderer-impl/src/test/java/org/opendaylight/nic/gbp/renderer/impl/GBPTenantPolicyCreatorTest.java
UnitTest implementation added for GBPTenantPolicyCreator in GBP-Renderer-Impl module.
Java
agpl-3.0
error: pathspec 'lucas-ejb/ejbModule/florian_haas/lucas/database/EnumQueryComparator.java' did not match any file(s) known to git
8b50d602b8e703c8143ba132dff72162882c951a
1
Listopia-Official/listopia-user-and-company-administration-system,Listopia-Official/listopia-user-and-company-administration-system,Listopia-Official/listopia-user-and-company-administration-system
package florian_haas.lucas.database;

/**
 * Comparison operators usable in dynamic queries, together with helpers that
 * list which operators apply to a given operand category.
 *
 * NOTE(review): "GREATHER" is a misspelling of "GREATER", but the constant
 * names are part of the public API (and their ordinals may be persisted), so
 * they are kept as-is.
 */
public enum EnumQueryComparator {

	EQUAL,
	NOT_EQUAL,
	GREATHER_THAN,
	LESS_THAN,
	GREATHER_EQUAL,
	LESS_EQUAL,
	IN,
	NOT_IN,
	LIKE,
	NOT_LIKE;

	/**
	 * @return a fresh array of the comparators applicable to numeric values
	 *         (equality and ordering operators)
	 */
	public static EnumQueryComparator[] getNumericComparators() {
		final EnumQueryComparator[] numeric = {
				EQUAL, NOT_EQUAL, GREATHER_THAN, LESS_THAN, GREATHER_EQUAL, LESS_EQUAL };
		return numeric;
	}

	/**
	 * @return a fresh array of the comparators applicable to collection-valued
	 *         operands (equality and membership operators)
	 */
	public static EnumQueryComparator[] getArrayComparators() {
		final EnumQueryComparator[] array = { EQUAL, NOT_EQUAL, IN, NOT_IN };
		return array;
	}

	/**
	 * @return a fresh array of the comparators applicable to textual values
	 *         (the numeric operators plus pattern matching)
	 */
	public static EnumQueryComparator[] getTextComparators() {
		final EnumQueryComparator[] text = {
				EQUAL, NOT_EQUAL, GREATHER_THAN, LESS_THAN, GREATHER_EQUAL, LESS_EQUAL, LIKE, NOT_LIKE };
		return text;
	}
}
lucas-ejb/ejbModule/florian_haas/lucas/database/EnumQueryComparator.java
Added EnumQueryComparator
lucas-ejb/ejbModule/florian_haas/lucas/database/EnumQueryComparator.java
Added EnumQueryComparator
Java
agpl-3.0
error: pathspec 'wasp-cli/src/main/java/edu/yu/einstein/wasp/cli/TemplateFileHandler.java' did not match any file(s) known to git
6e90e86c2b62e0fb7ba36f3770139268491bf742
1
WASP-System/central,WASP-System/central,WASP-System/central,WASP-System/central,WASP-System/central,WASP-System/central
package edu.yu.einstein.wasp.cli;

import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;

/**
 * Static helper for writing a CSV template file describing jobs, samples and
 * files, and for re-importing the data a user filled into such a template.
 */
public class TemplateFileHandler {

	/** Utility class; all members are static, so instantiation is prevented. */
	private TemplateFileHandler() {
	}

	/**
	 * Creates (or replaces) a CSV template file at the given path. The header
	 * columns depend on whether the template references existing cell
	 * libraries or describes new jobs/samples.
	 *
	 * @param path destination of the template file; an existing file at this
	 *        path is deleted first
	 * @param isUsingExistingCellLibraries true to emit the cell-library
	 *        variant of the header, false for the job/sample variant
	 * @throws IOException if the file cannot be created or written
	 */
	public static void createTemplateFile(Path path, boolean isUsingExistingCellLibraries) throws IOException {
		Path templateFile;
		try {
			Files.deleteIfExists(path);
			templateFile = Files.createFile(path);
		} catch (IOException e) {
			// chain the original exception so the root cause is not lost
			throw new IOException("Unable to create file " + path.getFileName(), e);
		}
		List<String> lines = new ArrayList<>();
		lines.add("# It is not necessary to duplicate entries, e.g. if supplying many samples for one job, a job entry will be matched with samples on subsequent lines until the next job entry is found.");
		lines.add("# You can specify a metadata entry directly via it's key, e.g. FileMeta.area.key (where the entire metakey equals 'area.key').");
		lines.add("# e.g. if you are providing columns for a FileGroup, any extra columns you add for FilegroupMeta etries will be linked to the FileGroup when it is created.");
		lines.add("# to view available file types use the CLI: 'wasp -u <user> -p <pass> -list fileTypes'");
		lines.add("#");
		lines.add("# enter N/A for uknown values");
		lines.add("#");
		if (isUsingExistingCellLibraries) {
			lines.add("# You can add additional metadata columns for any of SampleSourceMeta, SampleMeta, FileGroupMeta and FileHandleMeta");
			lines.add("# to view available cell libraries use the CLI: 'wasp -u <user> -p <pass> -list cellLibraries'");
			lines.add("cellLibraryId,FileGroup.description,FileGroup.fileTypeId,FileHandle.fileName,FileHandle.fileURI,FileHandle.md5hash");
		} else {
			lines.add("# You can add additional metadata columns for any of JobMeta, SampleMeta, FileGroupMeta and FileHandleMeta");
			lines.add("# to view available assay workflows use the CLI: 'wasp -u <user> -p <pass> -list workflows'");
			lines.add("# to view available sample subtypes use the CLI: 'wasp -u <user> -p <pass> -list sampleSubtypes'");
			lines.add("# to view available genome builds use the CLI: 'wasp -u <user> -p <pass> -list builds'");
			lines.add("# or combined in one command: 'wasp -u <user> -p <pass> -list workflows,sampleSubtypes,builds'");
			lines.add("Job.name,Job.workflowId,Sample.name,Sample.sampleSubtypeId,SampleMeta.genome.genomeString,FileGroup.description,FileGroup.fileTypeId,FileHandle.fileName,FileHandle.fileURI,FileHandle.md5hash");
		}
		try {
			Files.write(templateFile, lines, Charset.defaultCharset());
		} catch (IOException e) {
			// chain the original exception so the root cause is not lost
			throw new IOException("Unable to write to file " + path.getFileName(), e);
		}
	}

	/**
	 * Reads a filled-in template file and returns its data rows.
	 *
	 * Lines starting with '#' are skipped. An empty cell inherits the value of
	 * the same column on the previous row (not allowed on the header row or
	 * the first data row); a cell containing "N/A" (case-insensitive) is
	 * converted to an empty string.
	 *
	 * @param path template file to read
	 * @return one inner list per non-comment line, in file order
	 * @throws IOException if the file is missing, unreadable, or an empty cell
	 *         cannot be resolved from the previous row
	 */
	public static List<List<String>> importTemplateFileData(Path path) throws IOException {
		List<List<String>> data = new ArrayList<>();
		List<String> lines = new ArrayList<>();
		if (!Files.exists(path)) {
			throw new IOException("File does not exist: " + path.getFileName());
		}
		try {
			lines = Files.readAllLines(path, Charset.defaultCharset());
		} catch (IOException e) {
			// chain the original exception so the root cause is not lost
			throw new IOException("Unable read file " + path.getFileName(), e);
		}
		int lineCount = 0;
		for (String line : lines) {
			if (line.trim().startsWith("#"))
				continue;
			data.add(new ArrayList<String>());
			int elementCount = 0;
			for (String element : line.split(",")) {
				if (element.isEmpty()) {
					if (lineCount <= 1)
						throw new IOException("Neither header line or first line of data can have empty elements");
					List<String> previousLine = data.get(lineCount - 1);
					// guard against the previous row being shorter than this one,
					// which would otherwise raise an unchecked IndexOutOfBoundsException
					if (elementCount >= previousLine.size())
						throw new IOException("Cannot inherit value for column " + elementCount
								+ " from previous line: it has only " + previousLine.size() + " columns");
					element = previousLine.get(elementCount); // value on previous line
				} else if (element.equalsIgnoreCase("N/A"))
					element = "";
				data.get(lineCount).add(element);
				elementCount++;
			}
			lineCount++;
		}
		return data;
	}
}
wasp-cli/src/main/java/edu/yu/einstein/wasp/cli/TemplateFileHandler.java
added forgotten new file
wasp-cli/src/main/java/edu/yu/einstein/wasp/cli/TemplateFileHandler.java
added forgotten new file
Java
agpl-3.0
error: pathspec 'src/test/java/org/kuali/kra/award/htmlunitwebtest/AwardReportsWebTest.java' did not match any file(s) known to git
2f63396888120ce6a286068799bdc9eea4ecd314
1
iu-uits-es/kc,ColostateResearchServices/kc,geothomasp/kcmit,jwillia/kc-old1,UniversityOfHawaiiORS/kc,iu-uits-es/kc,mukadder/kc,jwillia/kc-old1,iu-uits-es/kc,kuali/kc,ColostateResearchServices/kc,geothomasp/kcmit,mukadder/kc,jwillia/kc-old1,ColostateResearchServices/kc,geothomasp/kcmit,geothomasp/kcmit,UniversityOfHawaiiORS/kc,mukadder/kc,jwillia/kc-old1,geothomasp/kcmit,kuali/kc,UniversityOfHawaiiORS/kc,kuali/kc
/* * Copyright 2006-2008 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kra.award.htmlunitwebtest; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.kuali.core.util.KualiDecimal; import com.gargoylesoftware.htmlunit.html.HtmlForm; import com.gargoylesoftware.htmlunit.html.HtmlImageInput; import com.gargoylesoftware.htmlunit.html.HtmlPage; /** * * This is the integration test for Award Reports page. */ public class AwardReportsWebTest extends AwardWebTestBase{ HtmlPage awardPaymentReportsAndTermsPage; /** * The set up method calls the parent super method and gets the * award Payment, Reports and Terms page after that. * @see org.kuali.kra.award.htmlunitwebtest.AwardWebTestBase#setUp() */ @Before public void setUp() throws Exception { super.setUp(); awardPaymentReportsAndTermsPage = getPaymentReportsAndTermsPage(); } /** * This method calls parent tear down method and than sets awardTimeAndMoneyPage to null * @see org.kuali.kra.award.htmlunitwebtest.AwardWebTestBase#tearDown() */ @After public void tearDown() throws Exception { awardPaymentReportsAndTermsPage = null; super.tearDown(); } /** * * This method tests the adding of 2 F & A Rates (on and off campus) * and saving them. 
* @throws Exception */ @Test public void testAwardReportsSimpleAdd() throws Exception{ setFieldValue(awardPaymentReportsAndTermsPage, "newAwardReportTerm[0].reportCode", "5"); awardPaymentReportsAndTermsPage = clickOn(awardPaymentReportsAndTermsPage, "methodToCall.refreshPulldownOptions"); setFieldValue(awardPaymentReportsAndTermsPage, "newAwardReportTerm[0].frequencyCode", "14"); awardPaymentReportsAndTermsPage = clickOn(awardPaymentReportsAndTermsPage, "methodToCall.refreshPulldownOptions"); setFieldValue(awardPaymentReportsAndTermsPage, "newAwardReportTerm[0].frequencyBaseCode", "2"); setFieldValue(awardPaymentReportsAndTermsPage, "newAwardReportTerm[0].ospDistributionCode", "1"); setFieldValue(awardPaymentReportsAndTermsPage, "newAwardReportTerm[0].dueDate", "06/30/2008"); final HtmlForm form1 = (HtmlForm) awardPaymentReportsAndTermsPage.getForms().get(0); String completeButtonName1=getImageTagName(awardPaymentReportsAndTermsPage, "methodToCall.addAwardReportTerm.reportClass1.reportClassIndex0"); final HtmlImageInput button1 = (HtmlImageInput) form1.getInputByName(completeButtonName1); HtmlPage awardPaymentReportsAndTermsPageAfterAdd = (HtmlPage) button1.click(); HtmlPage awardPaymentReportsAndTermsPageAfterSave = clickOn(awardPaymentReportsAndTermsPageAfterAdd, "methodToCall.save"); assertDoesNotContain(awardPaymentReportsAndTermsPageAfterSave, ERROR_TABLE_OR_VIEW_DOES_NOT_EXIST); assertDoesNotContain(awardPaymentReportsAndTermsPageAfterSave, ERRORS_FOUND_ON_PAGE); assertContains(awardPaymentReportsAndTermsPageAfterSave,SAVE_SUCCESS_MESSAGE); assertContains(awardPaymentReportsAndTermsPageAfterSave,"Fiscal (1) "); } }
src/test/java/org/kuali/kra/award/htmlunitwebtest/AwardReportsWebTest.java
KCAWD-20 - added awardReportsWebTest, deleted unused tests from AwardPaymentReportsAndTermsActionTest
src/test/java/org/kuali/kra/award/htmlunitwebtest/AwardReportsWebTest.java
KCAWD-20 - added awardReportsWebTest, deleted unused tests from AwardPaymentReportsAndTermsActionTest
Java
agpl-3.0
error: pathspec 'src/test/java/org/kuali/kra/proposaldevelopment/web/ProposalActionsWebTest.java' did not match any file(s) known to git
19a70d057cfd6e8d139ff2b15d392a2e35c3efef
1
jwillia/kc-old1,kuali/kc,jwillia/kc-old1,mukadder/kc,geothomasp/kcmit,geothomasp/kcmit,UniversityOfHawaiiORS/kc,ColostateResearchServices/kc,geothomasp/kcmit,UniversityOfHawaiiORS/kc,geothomasp/kcmit,UniversityOfHawaiiORS/kc,mukadder/kc,mukadder/kc,iu-uits-es/kc,jwillia/kc-old1,kuali/kc,geothomasp/kcmit,iu-uits-es/kc,kuali/kc,iu-uits-es/kc,ColostateResearchServices/kc,ColostateResearchServices/kc,jwillia/kc-old1
/* * Copyright 2006-2008 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kra.proposaldevelopment.web; import java.util.List; import org.junit.Test; import org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument; import com.gargoylesoftware.htmlunit.html.HtmlElement; import com.gargoylesoftware.htmlunit.html.HtmlPage; public class ProposalActionsWebTest extends ProposalDevelopmentWebTestBase { private static final String VALID_OPPORTUNITY_ID_APP_S2_S_TEST_SF424_V2 = "APP-S2S-TEST-SF424-V2"; private static final String VALID_CFDA_NUMBER_00_000 = "00.000"; private static final String GRANTS_GOV_TAB_NAME = "methodToCall.headerTab.headerDispatch.save.navigateTo.grantsGov.x"; private static final String PROPOSAL_ACTIONS_TAB_NAME = "methodToCall.headerTab.headerDispatch.save.navigateTo.actions.x"; private static final String BUDGET_VERSIONS_TAB_NAME = "methodToCall.headerTab.headerDispatch.save.navigateTo.budgetVersions.x"; /** * * Test Grants.gov lookup when both CFDA Number and Opportunity Id are passed to the lookup helper service * then tests the ProposalActions DataValidation * @throws Exception */ @Test public void testDataValidationGrantsDotGovErrors() throws Exception{ HtmlPage proposalPage = getProposalDevelopmentPage(); setRequiredFields(proposalPage, DEFAULT_DOCUMENT_DESCRIPTION, "005891", DEFAULT_PROPOSAL_TITLE, "08/14/2007", "08/21/2007", DEFAULT_PROPOSAL_ACTIVITY_TYPE, DEFAULT_PROPOSAL_TYPE_CODE, 
DEFAULT_PROPOSAL_OWNED_BY_UNIT); String documentNumber = getFieldValue(proposalPage, "document.documentHeader.documentNumber"); setFieldValue(proposalPage, "document.programAnnouncementTitle", "we want give you money"); setFieldValue(proposalPage, "document.cfdaNumber", VALID_CFDA_NUMBER_00_000); setFieldValue(proposalPage, "document.programAnnouncementNumber", VALID_OPPORTUNITY_ID_APP_S2_S_TEST_SF424_V2); HtmlPage savedProposalPage = clickOn(proposalPage, "methodToCall.save", "Kuali :: Proposal Development Document"); HtmlPage page1 = clickOn(savedProposalPage, GRANTS_GOV_TAB_NAME); HtmlPage page2 = lookup(page1, "document.programAnnouncementNumber","opportunityId",VALID_OPPORTUNITY_ID_APP_S2_S_TEST_SF424_V2,false); assertContains(page2,VALID_CFDA_NUMBER_00_000); assertContains(page2,VALID_OPPORTUNITY_ID_APP_S2_S_TEST_SF424_V2); setFieldValue(page2,"document.s2sOpportunity.s2sSubmissionTypeCode","1"); HtmlPage page3 = clickOn(page2, "methodToCall.save", "Kuali :: Proposal Development Document"); ProposalDevelopmentDocument doc = (ProposalDevelopmentDocument) getDocument(documentNumber); assertEquals(doc.getS2sOpportunity().getOpportunityId(),VALID_OPPORTUNITY_ID_APP_S2_S_TEST_SF424_V2); assertEquals(doc.getS2sOpportunity().getCfdaNumber(),VALID_CFDA_NUMBER_00_000); assertEquals(doc.getS2sOpportunity().getS2sSubmissionTypeCode().toString(),"1"); ProposalDevelopmentDocument propDevDoc = (ProposalDevelopmentDocument) getDocument(documentNumber); if (propDevDoc.getS2sOpportunity() != null) { log.debug("Saved document has opportunity["+propDevDoc.getS2sOpportunity().getOpportunityId()+"]"); } else { log.debug("Saved document has opportunity is null"); } HtmlPage proposalActionsPage = clickOn(page3, PROPOSAL_ACTIONS_TAB_NAME); HtmlPage auditOffProposalPage = clickOn(proposalActionsPage, "methodToCall.toggleTab.tabDataValidation", "Kuali :: Proposal Development Document"); HtmlPage auditOnProposalPage = clickOn(auditOffProposalPage, "methodToCall.activate", "Kuali :: 
Proposal Development Document"); List<HtmlElement> grantsDotGovErrors = getAllElementsByName(auditOnProposalPage, "methodToCall.toggleTab.tabGrantsGovGrantsGovErrors", false); int numberGDGErrors = grantsDotGovErrors.size(); System.out.println("# errors = ["+numberGDGErrors+"]"); org.junit.Assert.assertTrue(numberGDGErrors>0); HtmlPage budgetVersionsPage = clickOn(page3, BUDGET_VERSIONS_TAB_NAME); HtmlPage nextProposalActionsPage = clickOn(budgetVersionsPage, PROPOSAL_ACTIONS_TAB_NAME); List<HtmlElement> nextGrantsDotGovErrors = getAllElementsByName(nextProposalActionsPage, "methodToCall.toggleTab.tabGrantsGovGrantsGovErrors", false); assertTrue(nextGrantsDotGovErrors.size()==numberGDGErrors); } }
src/test/java/org/kuali/kra/proposaldevelopment/web/ProposalActionsWebTest.java
KRACOEUS-1762, added WebTest Unit test to continue to validate this fix.
src/test/java/org/kuali/kra/proposaldevelopment/web/ProposalActionsWebTest.java
KRACOEUS-1762, added WebTest Unit test to continue to validate this fix.
Java
lgpl-2.1
error: pathspec 'src/dr/app/beagle/tools/BeagleSequenceSimulator.java' did not match any file(s) known to git
9f34a05af81487524da63efa16cb23b53cc7823a
1
armanbilge/BEAST_sandbox,armanbilge/BEAST_sandbox,armanbilge/BEAST_sandbox,svn2github/beast-mcmc,svn2github/beast-mcmc,svn2github/beast-mcmc,svn2github/beast-mcmc,armanbilge/BEAST_sandbox,svn2github/beast-mcmc,armanbilge/BEAST_sandbox
package dr.app.beagle.tools; import dr.app.beagle.evomodel.sitemodel.GammaSiteRateModel; import dr.app.beagle.evomodel.substmodel.FrequencyModel; import dr.app.beagle.evomodel.substmodel.HKY; import dr.evolution.alignment.SimpleAlignment; import dr.evolution.datatype.Codons; import dr.evolution.datatype.DataType; import dr.evolution.datatype.Nucleotides; import dr.evolution.io.NewickImporter; import dr.evolution.sequence.Sequence; import dr.evolution.tree.NodeRef; import dr.evolution.tree.Tree; import dr.evomodel.branchratemodel.BranchRateModel; import dr.evomodel.branchratemodel.DefaultBranchRateModel; import dr.inference.markovjumps.StateHistory; import dr.inference.model.Parameter; import dr.math.MathUtils; @SuppressWarnings("serial") public class BeagleSequenceSimulator extends SimpleAlignment { /** number of replications **/ private int nReplications; /** tree used for generating samples **/ private Tree tree; /** site model used for generating samples **/ private GammaSiteRateModel siteModel; /** branch rate model used for generating samples **/ private BranchRateModel branchRateModel; /** nr of categories in site model **/ int categoryCount; /** nr of states in site model **/ int stateCount; private boolean has_ancestralSequence = false; private Sequence ancestralSequence; /** * Constructor * * @param tree * @param siteModel * @param branchRateModel * @param nReplications: nr of sites to generate */ public BeagleSequenceSimulator(Tree tree, GammaSiteRateModel siteModel, BranchRateModel branchRateModel, int nReplications) { this.tree = tree; this.siteModel = siteModel; this.branchRateModel = branchRateModel; this.nReplications = nReplications; stateCount = this.siteModel.getSubstitutionModel().getDataType().getStateCount(); categoryCount = this.siteModel.getCategoryCount(); } // END: Constructor /** * Convert integer representation of sequence into a Sequence * * @param seq integer representation of the sequence * @param node used to determine taxon for 
sequence * @return Sequence */ Sequence intArray2Sequence(int[] seq, NodeRef node) { String sSeq = ""; DataType dataType = siteModel.getSubstitutionModel().getDataType(); for (int i = 0; i < nReplications; i++) { if (dataType instanceof Codons) { String s = dataType.getTriplet(seq[i]); sSeq += s; } else { String c = dataType.getCode(seq[i]); sSeq += c; } } return new Sequence(tree.getNodeTaxon(node), sSeq); }// END: intArray2Sequence void setAncestralSequence(Sequence seq) { ancestralSequence = seq; has_ancestralSequence = true; }// END: setAncestralSequence int[] sequence2intArray(Sequence seq) { int array[] = new int[nReplications]; if (seq.getLength() != nReplications) { throw new RuntimeException("Ancestral sequence length has " + seq.getLength() + " characters " + "expecting " + nReplications + " characters"); } else { for (int i = 0; i < nReplications; i++) { array[i] = siteModel.getSubstitutionModel().getDataType() .getState(seq.getChar(i)); } } return array; }// END: sequence2intArray /** * perform the actual sequence generation * * @return alignment containing randomly generated sequences for the nodes * in the leaves of the tree */ public void simulate() { double[] lambda = new double[stateCount * stateCount]; NodeRef root = tree.getRoot(); double[] categoryProbs = siteModel.getCategoryProportions(); int[] category = new int[nReplications]; for (int i = 0; i < nReplications; i++) { category[i] = MathUtils.randomChoicePDF(categoryProbs); } int[] seq = new int[nReplications]; if (has_ancestralSequence) { seq = sequence2intArray(ancestralSequence); } else { FrequencyModel frequencyModel = siteModel.getSubstitutionModel() .getFrequencyModel(); for (int i = 0; i < nReplications; i++) { seq[i] = MathUtils.randomChoicePDF(frequencyModel .getFrequencies()); } } this.setReportCountStatistics(true); setDataType(siteModel.getSubstitutionModel().getDataType()); traverse(root, seq, category, this, lambda); }//END: simulate /** * recursively walk through the tree top 
down, and add sequence to alignment * whenever a leave node is reached. * * @param node * reference to the current node, for which we visit all children * @param parentSequence * randomly generated sequence of the parent node * @param category * array of categories for each of the sites * @param alignment */ private void traverse(NodeRef node, int[] parentSequence, int[] category, SimpleAlignment alignment, double[] lambda) { for (int iChild = 0; iChild < tree.getChildCount(node); iChild++) { NodeRef child = tree.getChild(node, iChild); int[] seq = new int[nReplications]; StateHistory[] histories = new StateHistory[nReplications]; for (int i = 0; i < nReplications; i++) { histories[i] = simulateAlongBranch(tree, child, category[i], parentSequence[i], lambda); seq[i] = histories[i].getEndingState(); } if (tree.getChildCount(child) == 0) { alignment.addSequence(intArray2Sequence(seq, child)); } traverse(tree.getChild(node, iChild), seq, category, alignment, lambda); } }// END: traverse private StateHistory simulateAlongBranch(Tree tree, NodeRef node, int rateCategory, int startingState, double[] lambda) { NodeRef parent = tree.getParent(node); final double branchRate = branchRateModel.getBranchRate(tree, node); // Get the operational time of the branch final double branchTime = branchRate * (tree.getNodeHeight(parent) - tree.getNodeHeight(node)); if (branchTime < 0.0) { throw new RuntimeException("Negative branch length: " + branchTime); } double branchLength = siteModel.getRateForCategory(rateCategory) * branchTime; return StateHistory.simulateUnconditionalOnEndingState(0.0, startingState, branchLength, lambda, stateCount); }// END: simulateAlongBranch public String toString() { StringBuffer sb = new StringBuffer(); // alignment output sb.append("alignment\n"); sb.append(super.toString()); sb.append("\n"); return sb.toString(); }// END: toString /** generate simple site model, for testing purposes **/ static GammaSiteRateModel getDefaultGammaSiteRateModel() { 
Parameter kappa = new Parameter.Default(1, 2); Parameter freqs = new Parameter.Default(new double[] { 0.25, 0.25, 0.25, 0.25 }); FrequencyModel f = new FrequencyModel(Nucleotides.INSTANCE, freqs); HKY hky = new HKY(kappa, f); // TODO return new GammaSiteRateModel(hky.getModelName()); } // getDefaultSiteModel public static void main(String[] args) { try { int nReplications = 10; // create tree NewickImporter importer = new NewickImporter( "((A:1.0,B:1.0)AB:1.0,(C:1.0,D:1.0)CD:1.0)ABCD;"); Tree tree = importer.importTree(null); // create site model GammaSiteRateModel siteModel = getDefaultGammaSiteRateModel(); // create branch rate model BranchRateModel branchRateModel = new DefaultBranchRateModel(); // feed to sequence simulator and generate leaves BeagleSequenceSimulator treeSimulator = new BeagleSequenceSimulator( tree, siteModel, branchRateModel, nReplications); Sequence ancestralSequence = new Sequence(); ancestralSequence.appendSequenceString("TCAGGTCAAG"); treeSimulator.setAncestralSequence(ancestralSequence); System.out.println(treeSimulator.toString()); } catch (Exception e) { e.printStackTrace(); }// END: try-catch block } // END: main }// END: class
src/dr/app/beagle/tools/BeagleSequenceSimulator.java
working on seq simulator for Beagle git-svn-id: 67bc77c75b8364e4e9cdff0eb6560f5818674cd8@4832 ca793f91-a31e-0410-b540-2769d408b6a1
src/dr/app/beagle/tools/BeagleSequenceSimulator.java
working on seq simulator for Beagle
Java
unlicense
error: pathspec 'CoolGame.java' did not match any file(s) known to git
ba8dd733a5917437093af27a733084b39cd19098
1
czarlos/SwordRun,kjian279/fishing-for-treasure,chinnychin19/CS308_Proj1,myhgew/JGame_2DShootingGame
class CoolGame { public static void main (String args[]) { System.out.println("Cool!"); } }
CoolGame.java
Added the first Java file.
CoolGame.java
Added the first Java file.
Java
apache-2.0
a0f81df70b4f0177c6978a4d1190aff4f77e933f
0
ryano144/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,semonte/intellij-community,allotria/intellij-community,amith01994/intellij-community,mglukhikh/intellij-community,kool79/intellij-community,samthor/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,consulo/consulo,amith01994/intellij-community,ivan-fedorov/intellij-community,vladmm/intellij-community,clumsy/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,caot/intellij-community,asedunov/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,allotria/intellij-community,allotria/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,pwoodworth/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,consulo/consulo,suncycheng/intellij-community,jagguli/intellij-community,TangHao1987/intellij-community,adedayo/intellij-community,ibinti/intellij-community,jagguli/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,supersven/intellij-community,tmpgit/intellij-community,samthor/intellij-community,fnouama/intellij-community,jexp/idea2,MER-GROUP/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,petteyg/intellij-community,vladmm/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,dslomov/intellij-community,retomerz/intellij-community,fitermay/inte
llij-community,kool79/intellij-community,tmpgit/intellij-community,slisson/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,da1z/intellij-community,adedayo/intellij-community,da1z/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,fitermay/intellij-community,izonder/intellij-community,blademainer/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,blademainer/intellij-community,Distrotech/intellij-community,izonder/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,slisson/intellij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,caot/intellij-community,kdwink/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,samthor/intellij-community,wreckJ/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,izonder/intellij-community,caot/intellij-community,amith01994/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,slisson/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,consulo/consulo,xfournet/intellij-community,xfournet/intellij-community,ibinti/intellij-community,semonte/intellij-community,xfournet/intellij-community,retomerz/intellij-community,fengbaicanhe/intellij-community,Lekanich/intellij-community,xfournet/intellij-
community,retomerz/intellij-community,supersven/intellij-community,clumsy/intellij-community,ftomassetti/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,joewalnes/idea-community,michaelgallacher/intellij-community,asedunov/intellij-community,kdwink/intellij-community,suncycheng/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,joewalnes/idea-community,semonte/intellij-community,fitermay/intellij-community,da1z/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,retomerz/intellij-community,FHannes/intellij-community,ibinti/intellij-community,blademainer/intellij-community,consulo/consulo,ahb0327/intellij-community,hurricup/intellij-community,diorcety/intellij-community,suncycheng/intellij-community,da1z/intellij-community,ibinti/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,akosyakov/intellij-community,nicolargo/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,supersven/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,holmes/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,allotria/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-commun
ity,fitermay/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,joewalnes/idea-community,ryano144/intellij-community,signed/intellij-community,da1z/intellij-community,ol-loginov/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,ryano144/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,samthor/intellij-community,orekyuu/intellij-community,holmes/intellij-community,ibinti/intellij-community,slisson/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,signed/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,wreckJ/intellij-community,jagguli/intellij-community,adedayo/intellij-community,vladmm/intellij-community,retomerz/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,joewalnes/idea-community,SerCeMan/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,jexp/idea2,FHannes/intellij-community,wreckJ/intellij-community,vladmm/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,wreckJ/intellij-community,vladmm/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,ivan-fedorov/intellij-community,rob
ovm/robovm-studio,fengbaicanhe/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,supersven/intellij-community,supersven/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,ibinti/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,gnuhub/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,blademainer/intellij-community,samthor/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,dslomov/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,adedayo/intellij-community,signed/intellij-community,nicolargo/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,retomerz/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,dslomov/intellij-community,holmes/intellij-community,asedunov/intellij-community,holmes/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,asedunov/intellij-community,gnuhub/intellij-community,ibinti/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,robovm/robovm-studio,joewalnes/idea-community,ernestp/consulo,caot/intellij-community,caot/intellij-community,apixandru/intellij-community,diorcety/intellij-communit
y,diorcety/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,ernestp/consulo,robovm/robovm-studio,ryano144/intellij-community,Distrotech/intellij-community,blademainer/intellij-community,apixandru/intellij-community,kool79/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,apixandru/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,youdonghai/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,da1z/intellij-community,muntasirsyed/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,caot/intellij-community,pwoodworth/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,holmes/intellij-community,slisson/intellij-community,da1z/intellij-community,akosyakov/intellij-community,FHannes/intellij-community,amith01994/intellij-community,diorcety/intellij-community,dslomov/intellij-community,petteyg/intellij-community,da1z/intellij-community,wreckJ/intellij-community,da1z/intellij-community,semonte/intellij-community,semonte/intellij-community,Lekanich/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,jexp/idea2,gnuhub/intellij-community,signed/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,petteyg/intellij-community,hurricup/intellij-community,ade
dayo/intellij-community,youdonghai/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,xfournet/intellij-community,ibinti/intellij-community,dslomov/intellij-community,da1z/intellij-community,signed/intellij-community,dslomov/intellij-community,robovm/robovm-studio,apixandru/intellij-community,signed/intellij-community,clumsy/intellij-community,signed/intellij-community,ernestp/consulo,nicolargo/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,allotria/intellij-community,slisson/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,MichaelNedzelsky/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,clumsy/intellij-community,semonte/intellij-community,blademainer/intellij-community,jexp/idea2,supersven/intellij-community,petteyg/intellij-community,retomerz/intellij-community,izonder/intellij-community,izonder/intellij-community,michaelgallacher/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,slisson/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,da1z/intellij-community,youdonghai/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,ernestp/consulo,Distrotech/intellij-community,semonte/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,xfournet/intellij-community,ahb0327/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,Distrotech/intellij-community,ryano144/intel
lij-community,ol-loginov/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,fitermay/intellij-community,kool79/intellij-community,fnouama/intellij-community,Distrotech/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,izonder/intellij-community,kool79/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,supersven/intellij-community,joewalnes/idea-community,Distrotech/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,petteyg/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,joewalnes/idea-community,gnuhub/intellij-community,kdwink/intellij-community,clumsy/intellij-community,dslomov/intellij-community,idea4bsd/idea4bsd,consulo/consulo,ol-loginov/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,hurricup/intellij-community,da1z
/intellij-community,orekyuu/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,apixandru/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,holmes/intellij-community,TangHao1987/intellij-community,adedayo/intellij-community,allotria/intellij-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,clumsy/intellij-community,slisson/intellij-community,signed/intellij-community,clumsy/intellij-community,retomerz/intellij-community,nicolargo/intellij-community,jexp/idea2,apixandru/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,jexp/idea2,ftomassetti/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,salguarnieri/intellij-community,akosyakov/intellij-community,holmes/intellij-community,allotria/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,fitermay/intellij-community,jexp/idea2,petteyg/intellij-community,fnouama/intellij-community,asedunov/intellij-community,slisson/intellij-community,vladmm/intellij-community,akosyakov/intellij-community,vladmm/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,pwoodworth/intellij-community,TangHao1987/intellij-community,gnuhub/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,fitermay/intellij-community,michaelga
llacher/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,consulo/consulo,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,caot/intellij-community,xfournet/intellij-community,fitermay/intellij-community,petteyg/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,joewalnes/idea-community,joewalnes/idea-community,adedayo/intellij-community,orekyuu/intellij-community,akosyakov/intellij-community,Lekanich/intellij-community,hurricup/intellij-community,allotria/intellij-community,blademainer/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,clumsy/intellij-community,ftomassetti/intellij-community,signed/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,samthor/intellij-community,retomerz/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,amith01994/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,kool79/intellij-community,alphafoobar/intellij-community,samthor/intellij-community,kool79/intellij-community,ryano144/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,Lekanich/intellij-community,caot/intellij-community,caot/intellij-community,Lekanich/intellij-community,ThiagoGarciaAlves/inte
llij-community,samthor/intellij-community,samthor/intellij-community,signed/intellij-community,petteyg/intellij-community,vladmm/intellij-community,xfournet/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,fnouama/intellij-community,caot/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,pwoodworth/intellij-community,akosyakov/intellij-community,jexp/idea2,SerCeMan/intellij-community,ftomassetti/intellij-community,signed/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,wreckJ/intellij-community,ernestp/consulo,mglukhikh/intellij-community,lucafavatella/intellij-community,izonder/intellij-community,youdonghai/intellij-community,diorcety/intellij-community,ahb0327/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,xfournet/intellij-community,semonte/intellij-community,Lekanich/intellij-community,supersven/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,ernestp/consulo,vvv1559/intellij-community,pwoodworth/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,orekyuu/intellij-community,allotria/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,izonder/intellij-community
package com.intellij.execution.impl; import com.intellij.codeInsight.CodeInsightColors; import com.intellij.execution.filters.*; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.ui.ConsoleView; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.ide.macro.DataAccessor; import com.intellij.ide.GeneralSettings; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.ex.DataConstantsEx; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.diff.actions.DiffActions; import com.intellij.openapi.editor.*; import com.intellij.openapi.editor.actionSystem.EditorActionManager; import com.intellij.openapi.editor.actionSystem.TypedAction; import com.intellij.openapi.editor.actionSystem.TypedActionHandler; import com.intellij.openapi.editor.colors.EditorColors; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.editor.colors.EditorColorsScheme; import com.intellij.openapi.editor.event.DocumentAdapter; import com.intellij.openapi.editor.event.EditorMouseAdapter; import com.intellij.openapi.editor.event.EditorMouseEvent; import com.intellij.openapi.editor.ex.EditorEx; import com.intellij.openapi.editor.ex.EditorHighlighter; import com.intellij.openapi.editor.ex.HighlighterIterator; import com.intellij.openapi.editor.markup.HighlighterLayer; import com.intellij.openapi.editor.markup.HighlighterTargetArea; import com.intellij.openapi.editor.markup.RangeHighlighter; import com.intellij.openapi.editor.markup.TextAttributes; import com.intellij.openapi.ide.CopyPasteManager; import com.intellij.openapi.keymap.Keymap; import com.intellij.openapi.keymap.KeymapManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.Condition; import 
com.intellij.openapi.util.Key; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.fileEditor.OpenFileDescriptor; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.tree.IElementType; import com.intellij.util.Alarm; import com.intellij.util.EditorPopupHandler; import com.intellij.util.containers.HashMap; import javax.swing.*; import java.awt.*; import java.awt.datatransfer.DataFlavor; import java.awt.datatransfer.Transferable; import java.awt.event.MouseEvent; import java.awt.event.MouseMotionAdapter; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.Map; public final class ConsoleViewImpl extends JPanel implements ConsoleView, DataProvider { private static final Logger LOG = Logger.getInstance("#com.intellij.execution.impl.ConsoleViewImpl"); private static final int FLUSH_DELAY = 200; //TODO : make it an option private static final Key<ConsoleViewImpl> CONSOLE_VIEW_IN_EDITOR_VIEW = Key.create("CONSOLE_VIEW_IN_EDITOR_VIEW"); static { final EditorActionManager actionManager = EditorActionManager.getInstance(); final TypedAction typedAction = actionManager.getTypedAction(); typedAction.setupHandler(new MyTypedHandler(typedAction.getHandler())); } private static final Color BACKGROUND_COLOR = Color.white; private static final TextAttributes HYPERLINK_ATTRIBUTES = EditorColorsManager.getInstance().getGlobalScheme().getAttributes(CodeInsightColors.HYPERLINK_ATTRIBUTES); private final DisposedPsiManagerCheck myPsiDisposedCheck; private ConsoleState myState = ConsoleState.NOT_STARTED; private static class TokenInfo{ final ConsoleViewContentType contentType; final int startOffset; int endOffset; final TextAttributes attributes; public TokenInfo(final ConsoleViewContentType contentType, final int startOffset, final int endOffset) { this.contentType = contentType; this.startOffset = startOffset; this.endOffset = endOffset; attributes = contentType.getAttributes(); } } private final 
Project myProject; private boolean myOutputPaused; private Editor myEditor; private final Object LOCK = new Object(); private int myContentSize; private StringBuffer myDeferredOutput = new StringBuffer(); private StringBuffer myDeferredUserInput = new StringBuffer(); private ArrayList<TokenInfo> myTokens = new ArrayList<TokenInfo>(); private final Hyperlinks myHyperlinks = new Hyperlinks(); private String myHelpId; private Alarm myFlushAlarm = new Alarm(); private final Runnable myFlushDeferredRunnable = new Runnable() { public void run() { if (myProject.isDisposed()) return; flushDeferredText(); } }; private CompositeFilter myMessageFilter = new CompositeFilter(); public ConsoleViewImpl(final Project project) { super(new BorderLayout()); myPsiDisposedCheck = new DisposedPsiManagerCheck(project); myProject = project; addMessageFilter(new ExceptionFilter(myProject));//TEMP! } public void attachToProcess(final ProcessHandler processHandler){ myState = myState.attachTo(this, processHandler); } public void clear() { assertIsDispatchThread(); synchronized(LOCK){ myContentSize = 0; myDeferredOutput.setLength(0); myDeferredUserInput.setLength(0); if (myEditor != null){ ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { myHyperlinks.clear(); myEditor.getMarkupModel().removeAllHighlighters(); myTokens.clear(); CommandProcessor.getInstance().executeCommand(myProject, new Runnable() { public void run() { myEditor.getDocument().deleteString(0, myEditor.getDocument().getTextLength()); } }, null, null); } }); } } } public void scrollTo(final int offset) { assertIsDispatchThread(); if (myEditor == null) return; if (myState.isFinished() && !hasDeferredOutput()) { myEditor.getCaretModel().moveToOffset(offset); myEditor.getScrollingModel().scrollToCaret(ScrollType.MAKE_VISIBLE); } else myEditor.getScrollingModel().scrollTo(myEditor.offsetToLogicalPosition(offset), ScrollType.MAKE_VISIBLE); } private static void assertIsDispatchThread() { 
LOG.assertTrue(ApplicationManager.getApplication().isDispatchThread()); } public void setOutputPaused(final boolean value) { myOutputPaused = value; if (!value){ requestFlushImmediately(); } } public boolean isOutputPaused() { return myOutputPaused; } public boolean hasDeferredOutput() { synchronized(LOCK){ return myDeferredOutput.length() > 0; } } public void performWhenNoDeferredOutput(final Runnable runnable) { //Q: implement in another way without timer? if (!hasDeferredOutput()){ runnable.run(); } else{ new Alarm().addRequest( new Runnable() { public void run() { performWhenNoDeferredOutput(runnable); } }, 100 ); } } public JComponent getComponent() { if (myEditor == null){ myEditor = createEditor(); requestFlushImmediately(); add(myEditor.getComponent(), BorderLayout.CENTER); } return this; } public void dispose(){ myState = myState.dispose(); if (myEditor != null){ myFlushAlarm.cancelAllRequests(); EditorFactory.getInstance().releaseEditor(myEditor); synchronized (LOCK) { myDeferredOutput.setLength(0); } myEditor = null; } } public void print(String s, final ConsoleViewContentType contentType) { synchronized(LOCK){ s = StringUtil.convertLineSeparators(s, "\n"); myContentSize += s.length(); myDeferredOutput.append(s); if (contentType == ConsoleViewContentType.USER_INPUT){ myDeferredUserInput.append(s); } boolean needNew = true; if (!myTokens.isEmpty()){ final TokenInfo lastToken = myTokens.get(myTokens.size() - 1); if (lastToken.contentType == contentType){ lastToken.endOffset = myContentSize; // optimization needNew = false; } } if (needNew){ myTokens.add(new TokenInfo(contentType, myContentSize - s.length(), myContentSize)); } } if (s.indexOf('\n') >= 0 || s.indexOf('\r') >= 0){ if (contentType == ConsoleViewContentType.USER_INPUT){ flushDeferredUserInput(); } } final Runnable requestFlush = new Runnable() { public void run() { if (myFlushAlarm.getActiveRequestCount() == 0) { myFlushAlarm.addRequest(myFlushDeferredRunnable, FLUSH_DELAY); } } }; if 
(EventQueue.isDispatchThread()) requestFlush.run(); else SwingUtilities.invokeLater(requestFlush); } private void requestFlushImmediately() { myFlushAlarm.addRequest(new Runnable() { public void run() { flushDeferredText(); } }, 0); } public int getContentSize() { return myContentSize; } public boolean canPause() { return true; } private void flushDeferredText(){ LOG.assertTrue(ApplicationManager.getApplication().isDispatchThread()); synchronized(LOCK){ if (myOutputPaused) return; if (myDeferredOutput.length() == 0) return; } if (myEditor != null) { final String text = myDeferredOutput.substring(0, myDeferredOutput.length()); myDeferredOutput.setLength(0); final Document document = myEditor.getDocument(); final int oldLineCount = document.getLineCount(); final boolean isAtEndOfDocument = myEditor.getCaretModel().getOffset() == myEditor.getDocument().getTextLength(); ApplicationManager.getApplication().runWriteAction( new Runnable() { public void run() { CommandProcessor.getInstance().executeCommand(myProject, new Runnable() { public void run() { document.insertString(document.getTextLength(), text); } }, null, null); } } ); final int newLineCount = document.getLineCount(); if (oldLineCount < newLineCount){ myPsiDisposedCheck.performCheck(); highlightHyperlinks(oldLineCount - 1, newLineCount - 2); } if (isAtEndOfDocument) { myEditor.getCaretModel().moveToOffset(myEditor.getDocument().getTextLength()); myEditor.getSelectionModel().removeSelection(); myEditor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE); } } } private void flushDeferredUserInput() { if (myState.isRunning()){ synchronized(LOCK){ final String text = myDeferredUserInput.substring(0, myDeferredUserInput.length()); final int index = Math.max(text.lastIndexOf('\n'), text.lastIndexOf('\r')); if (index < 0) return; try{ myState.sendUserInput(text.substring(0, index + 1)); } catch(IOException e){ return; } myDeferredUserInput.setLength(0); myDeferredUserInput.append(text.substring(index + 1)); } } } 
public Object getData(final String dataId) { if (DataConstants.NAVIGATABLE.equals(dataId)){ if (myEditor == null) { return null; } final LogicalPosition pos = myEditor.getCaretModel().getLogicalPosition(); final HyperlinkInfo info = getHyperlinkInfoByLineAndCol(pos.line, pos.column); final OpenFileDescriptor openFileDescriptor = info instanceof OpenFileHyperlinkInfo ? ((OpenFileHyperlinkInfo)info).getDescriptor() : null; if (openFileDescriptor == null || !openFileDescriptor.getFile().isValid()) { return null; } return openFileDescriptor; } if (DataConstants.EDITOR.equals(dataId)) { return myEditor; } if (DataConstantsEx.HELP_ID.equals(dataId)) { return myHelpId; } return null; } public void setHelpId(final String helpId) { myHelpId = helpId; } public void addMessageFilter(final Filter filter) { myMessageFilter.addFilter(filter); } public void printHyperlink(final String hyperlinkText, final HyperlinkInfo info) { if (myEditor == null) return; print(hyperlinkText, ConsoleViewContentType.NORMAL_OUTPUT); flushDeferredText(); final int textLength = myEditor.getDocument().getTextLength(); addHyperlink(textLength - hyperlinkText.length(), textLength, null, info); } private Editor createEditor() { return ApplicationManager.getApplication().runReadAction(new Computable<Editor>() { public Editor compute() { final EditorFactory editorFactory = EditorFactory.getInstance(); final Document editorDocument = editorFactory.createDocument(""); final int bufferSize = GeneralSettings.getInstance().isUseCyclicBuffer() ? 
GeneralSettings.getInstance().getCyclicBufferSize() : 0; editorDocument.setCyclicBufferSize(bufferSize); final EditorEx editor = (EditorEx) editorFactory.createViewer(editorDocument,myProject); final EditorHighlighter highlighter = new MyHighghlighter(); editor.setHighlighter(highlighter); editor.putUserData(CONSOLE_VIEW_IN_EDITOR_VIEW, ConsoleViewImpl.this); final EditorSettings editorSettings = editor.getSettings(); editorSettings.setLineMarkerAreaShown(false); editorSettings.setLineNumbersShown(false); editorSettings.setFoldingOutlineShown(false); editorSettings.setAdditionalPageAtBottom(false); final EditorColorsScheme scheme = editor.getColorsScheme(); editor.setBackgroundColor(BACKGROUND_COLOR); scheme.setColor(EditorColors.CARET_ROW_COLOR, null); scheme.setColor(EditorColors.RIGHT_MARGIN_COLOR, null); editor.addEditorMouseListener(new EditorPopupHandler(){ public void invokePopup(final EditorMouseEvent event) { final MouseEvent mouseEvent = event.getMouseEvent(); popupInvoked(mouseEvent.getComponent(), mouseEvent.getX(), mouseEvent.getY()); } }); editor.addEditorMouseListener( new EditorMouseAdapter(){ public void mouseReleased(final EditorMouseEvent e){ final MouseEvent mouseEvent = e.getMouseEvent(); if (!mouseEvent.isPopupTrigger()){ navigate(e); } } } ); editor.getContentComponent().addMouseMotionListener( new MouseMotionAdapter(){ public void mouseMoved(final MouseEvent e){ final HyperlinkInfo info = getHyperlinkInfoByPoint(e.getPoint()); if (info != null){ editor.getContentComponent().setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR)); } else{ editor.getContentComponent().setCursor(Cursor.getPredefinedCursor(Cursor.TEXT_CURSOR)); } } } ); setEditorUpActions(editor); return editor; } }); } private static void setEditorUpActions(final Editor editor) { new EnterHandler().registerCustomShortcutSet(CommonShortcuts.ENTER, editor.getContentComponent()); registerActionHandler(editor, IdeActions.ACTION_EDITOR_PASTE, new PasteHandler()); 
registerActionHandler(editor, IdeActions.ACTION_EDITOR_BACKSPACE, new BackSpaceHandler()); } private static void registerActionHandler(final Editor editor, final String actionId, final AnAction action) { final Keymap keymap=KeymapManager.getInstance().getActiveKeymap(); final Shortcut[] shortcuts = keymap.getShortcuts(actionId); action.registerCustomShortcutSet(new CustomShortcutSet(shortcuts), editor.getContentComponent()); } private void popupInvoked(final Component component, final int x, final int y){ final DefaultActionGroup group = new DefaultActionGroup(); group.add(new ClearAllAction()); group.add(new CopyAction()); group.addSeparator(); final ActionManager actionManager = ActionManager.getInstance(); group.add(actionManager.getAction(DiffActions.COMPARE_WITH_CLIPBOARD)); final ActionPopupMenu menu = actionManager.createActionPopupMenu(ActionPlaces.UNKNOWN, group); menu.getComponent().show(component, x, y); } private void navigate(final EditorMouseEvent event){ if (event.getMouseEvent().isPopupTrigger()) return; final Point p = event.getMouseEvent().getPoint(); final HyperlinkInfo info = getHyperlinkInfoByPoint(p); if (info != null){ info.navigate(myProject); } } private HyperlinkInfo getHyperlinkInfoByPoint(final Point p){ final LogicalPosition pos = myEditor.xyToLogicalPosition(new Point(p.x, p.y)); return getHyperlinkInfoByLineAndCol(pos.line, pos.column); } private HyperlinkInfo getHyperlinkInfoByLineAndCol(final int line, final int col) { final int offset = myEditor.logicalPositionToOffset(new LogicalPosition(line, col)); return myHyperlinks.getHyperlinkAt(offset); } private void highlightHyperlinks(final int line1, final int line2){ if (myMessageFilter != null){ LOG.assertTrue(ApplicationManager.getApplication().isDispatchThread()); PsiDocumentManager.getInstance(myProject).commitAllDocuments(); final Document document = myEditor.getDocument(); final CharSequence chars = document.getCharsSequence(); for(int line = line1; line <= line2; line++){ if 
(line < 0) continue; final int startOffset = document.getLineStartOffset(line); int endOffset = document.getLineEndOffset(line); if (endOffset < document.getTextLength()){ endOffset++; // add '\n' } final String text = chars.subSequence(startOffset, endOffset).toString(); final Filter.Result result = myMessageFilter.applyFilter(text, endOffset); if (result != null){ final int highlightStartOffset = result.highlightStartOffset; final int highlightEndOffset = result.highlightEndOffset; final HyperlinkInfo hyperlinkInfo = result.hyperlinkInfo; addHyperlink(highlightStartOffset, highlightEndOffset, result.highlightAttributes, hyperlinkInfo); } } } } private void addHyperlink(final int highlightStartOffset, final int highlightEndOffset, final TextAttributes highlightAttributes, final HyperlinkInfo hyperlinkInfo) { TextAttributes textAttributes = highlightAttributes != null ? highlightAttributes : HYPERLINK_ATTRIBUTES; final RangeHighlighter highlighter = myEditor.getMarkupModel().addRangeHighlighter(highlightStartOffset, highlightEndOffset, HighlighterLayer.SELECTION - 1, textAttributes, HighlighterTargetArea.EXACT_RANGE); myHyperlinks.add(highlighter, hyperlinkInfo); } private class ClearAllAction extends AnAction{ public ClearAllAction(){ super("Clear All"); } public void actionPerformed(final AnActionEvent e){ clear(); } } private class CopyAction extends AnAction{ public CopyAction(){ super(myEditor.getSelectionModel().hasSelection() ? 
"Copy Selected Content" : "Copy Content"); } public void actionPerformed(final AnActionEvent e){ if (myEditor.getSelectionModel().hasSelection()){ myEditor.getSelectionModel().copySelectionToClipboard(); } else{ myEditor.getSelectionModel().setSelection(0, myEditor.getDocument().getTextLength()); myEditor.getSelectionModel().copySelectionToClipboard(); myEditor.getSelectionModel().removeSelection(); } } } private class MyHighghlighter extends DocumentAdapter implements EditorHighlighter { private boolean myHasEditor; public HighlighterIterator createIterator(final int startOffset) { final int startIndex = findTokenInfoByOffset(myTokens, startOffset); return new HighlighterIterator(){ private int myIndex = startIndex; public TextAttributes getTextAttributes() { return getTokenInfo().attributes; } public int getStart() { return getTokenInfo().startOffset; } public int getEnd() { return getTokenInfo().endOffset; } public IElementType getTokenType() { return null; } public void advance() { myIndex++; } public void retreat() { myIndex--; } public boolean atEnd() { return myIndex < 0 || myIndex >= myTokens.size(); } private TokenInfo getTokenInfo() { return myTokens.get(myIndex); } }; } public void setText(final CharSequence text) { } public void setEditor(final Editor editor) { LOG.assertTrue(!myHasEditor, "Highlighters cannot be reused with different editors"); myHasEditor = true; } public void setColorScheme(EditorColorsScheme scheme) { } } private static int findTokenInfoByOffset(final ArrayList<TokenInfo> tokens, final int offset) { int low = 0; int high = tokens.size() - 1; while(low <= high){ final int mid = (low + high) / 2; final TokenInfo midVal = tokens.get(mid); if (offset < midVal.startOffset){ high = mid - 1; } else if (offset >= midVal.endOffset){ low = mid + 1; } else{ return mid; } } return tokens.size(); } private static class MyTypedHandler implements TypedActionHandler { private TypedActionHandler myOriginalHandler; public MyTypedHandler(final 
TypedActionHandler originalAction) { myOriginalHandler = originalAction; } public void execute(final Editor editor, final char charTyped, final DataContext dataContext) { final ConsoleViewImpl consoleView = editor.getUserData(CONSOLE_VIEW_IN_EDITOR_VIEW); if (consoleView == null || !consoleView.myState.isRunning()){ myOriginalHandler.execute(editor, charTyped, dataContext); } else{ final String s = String.valueOf(charTyped); consoleView.print(s, ConsoleViewContentType.USER_INPUT); consoleView.flushDeferredText(); } } } private static final DataAccessor<ConsoleViewImpl> CONSOLE = new DataAccessor<ConsoleViewImpl>() { public ConsoleViewImpl getImpl(final DataContext dataContext) throws NoDataException { return EDITOR.getNotNull(dataContext).getUserData(CONSOLE_VIEW_IN_EDITOR_VIEW); } }; private static final Condition<ConsoleViewImpl> CONSOLE_IS_RUNNING = new Condition<ConsoleViewImpl>() { public boolean value(final ConsoleViewImpl consoleView) { return consoleView.myState.isRunning(); } }; private static final DataAccessor<ConsoleViewImpl> RUNNINT_CONSOLE =DataAccessor.createConditionalAccessor(CONSOLE, CONSOLE_IS_RUNNING); private static abstract class ConsoleAction extends AnAction { public void actionPerformed(final AnActionEvent e) { final ConsoleViewImpl console = RUNNINT_CONSOLE.from(e.getDataContext()); execute(console); } protected abstract void execute(ConsoleViewImpl console); public void update(final AnActionEvent e) { final ConsoleViewImpl console = RUNNINT_CONSOLE.from(e.getDataContext()); e.getPresentation().setEnabled(console != null); } } private static class EnterHandler extends ConsoleAction { public void execute(final ConsoleViewImpl consoleView) { consoleView.print("\n", ConsoleViewContentType.USER_INPUT); consoleView.flushDeferredText(); } } private static class PasteHandler extends ConsoleAction { public void execute(final ConsoleViewImpl consoleView) { final Transferable content = CopyPasteManager.getInstance().getContents(); if (content == 
null) return; String s = null; try { s = (String)content.getTransferData(DataFlavor.stringFlavor); } catch(Exception e) { consoleView.myEditor.getComponent().getToolkit().beep(); } if (s == null) return; consoleView.print(s, ConsoleViewContentType.USER_INPUT); consoleView.flushDeferredText(); } } private static class BackSpaceHandler extends ConsoleAction { public void execute(final ConsoleViewImpl consoleView) { final Editor editor = consoleView.myEditor; final Document document = editor.getDocument(); final int length = document.getTextLength(); if (length == 0) return; synchronized(consoleView.LOCK){ if (consoleView.myTokens.size() == 0) return; final TokenInfo info = consoleView.myTokens.get(consoleView.myTokens.size() - 1); if (info.contentType != ConsoleViewContentType.USER_INPUT) return; if (consoleView.myDeferredUserInput.length() == 0) return; consoleView.myDeferredUserInput.setLength(consoleView.myDeferredUserInput.length() - 1); info.endOffset -= 1; if (info.startOffset == info.endOffset){ consoleView.myTokens.remove(consoleView.myTokens.size() - 1); } consoleView.myContentSize--; } ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { document.deleteString(length - 1, length); editor.getCaretModel().moveToOffset(length - 1); editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE); editor.getSelectionModel().removeSelection(); } }); } } private static class Hyperlinks { private static final int NO_INDEX = Integer.MIN_VALUE; private final Map<RangeHighlighter,HyperlinkInfo> myHighlighterToMessageInfoMap = new HashMap<RangeHighlighter, HyperlinkInfo>(); private int myLastIndex = NO_INDEX; public void clear() { myHighlighterToMessageInfoMap.clear(); myLastIndex = NO_INDEX; } public HyperlinkInfo getHyperlinkAt(final int offset) { final Iterator<RangeHighlighter> iterator = myHighlighterToMessageInfoMap.keySet().iterator(); while(iterator.hasNext()){ final RangeHighlighter highlighter = iterator.next(); if 
(containsOffset(offset, highlighter)){ return myHighlighterToMessageInfoMap.get(highlighter); } } return null; } private static boolean containsOffset(final int offset, final RangeHighlighter highlighter) { return highlighter.getStartOffset() <= offset && offset <= highlighter.getEndOffset(); } public void add(final RangeHighlighter highlighter, final HyperlinkInfo hyperlinkInfo) { myHighlighterToMessageInfoMap.put(highlighter, hyperlinkInfo); if (myLastIndex != NO_INDEX && containsOffset(myLastIndex, highlighter)) myLastIndex = NO_INDEX; } } }
source/com/intellij/execution/impl/ConsoleViewImpl.java
package com.intellij.execution.impl; import com.intellij.codeInsight.CodeInsightColors; import com.intellij.execution.filters.*; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.ui.ConsoleView; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.ide.macro.DataAccessor; import com.intellij.ide.GeneralSettings; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.actionSystem.ex.DataConstantsEx; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.diff.actions.DiffActions; import com.intellij.openapi.editor.*; import com.intellij.openapi.editor.actionSystem.EditorActionManager; import com.intellij.openapi.editor.actionSystem.TypedAction; import com.intellij.openapi.editor.actionSystem.TypedActionHandler; import com.intellij.openapi.editor.colors.EditorColors; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.editor.colors.EditorColorsScheme; import com.intellij.openapi.editor.event.DocumentAdapter; import com.intellij.openapi.editor.event.EditorMouseAdapter; import com.intellij.openapi.editor.event.EditorMouseEvent; import com.intellij.openapi.editor.ex.EditorEx; import com.intellij.openapi.editor.ex.EditorHighlighter; import com.intellij.openapi.editor.ex.HighlighterIterator; import com.intellij.openapi.editor.markup.HighlighterLayer; import com.intellij.openapi.editor.markup.HighlighterTargetArea; import com.intellij.openapi.editor.markup.RangeHighlighter; import com.intellij.openapi.editor.markup.TextAttributes; import com.intellij.openapi.ide.CopyPasteManager; import com.intellij.openapi.keymap.Keymap; import com.intellij.openapi.keymap.KeymapManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.Condition; import 
com.intellij.openapi.util.Key; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.fileEditor.OpenFileDescriptor; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.tree.IElementType; import com.intellij.util.Alarm; import com.intellij.util.EditorPopupHandler; import com.intellij.util.containers.HashMap; import javax.swing.*; import java.awt.*; import java.awt.datatransfer.DataFlavor; import java.awt.datatransfer.Transferable; import java.awt.event.MouseEvent; import java.awt.event.MouseMotionAdapter; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.Map; public final class ConsoleViewImpl extends JPanel implements ConsoleView, DataProvider { private static final Logger LOG = Logger.getInstance("#com.intellij.execution.impl.ConsoleViewImpl"); private static final int FLUSH_DELAY = 200; //TODO : make it an option private static final Key<ConsoleViewImpl> CONSOLE_VIEW_IN_EDITOR_VIEW = Key.create("CONSOLE_VIEW_IN_EDITOR_VIEW"); static { final EditorActionManager actionManager = EditorActionManager.getInstance(); final TypedAction typedAction = actionManager.getTypedAction(); typedAction.setupHandler(new MyTypedHandler(typedAction.getHandler())); } private static final Color BACKGROUND_COLOR = Color.white; private static final TextAttributes HYPERLINK_ATTRIBUTES = EditorColorsManager.getInstance().getGlobalScheme().getAttributes(CodeInsightColors.HYPERLINK_ATTRIBUTES); private final DisposedPsiManagerCheck myPsiDisposedCheck; private ConsoleState myState = ConsoleState.NOT_STARTED; private static class TokenInfo{ final ConsoleViewContentType contentType; final int startOffset; int endOffset; final TextAttributes attributes; public TokenInfo(final ConsoleViewContentType contentType, final int startOffset, final int endOffset) { this.contentType = contentType; this.startOffset = startOffset; this.endOffset = endOffset; attributes = contentType.getAttributes(); } } private final 
Project myProject; private boolean myOutputPaused; private Editor myEditor; private Object LOCK = new Object(); private int myContentSize; private StringBuffer myDeferredOutput = new StringBuffer(); private StringBuffer myDeferredUserInput = new StringBuffer(); private ArrayList<TokenInfo> myTokens = new ArrayList<TokenInfo>(); private final Hyperlinks myHyperlinks = new Hyperlinks(); private String myHelpId; private Alarm myFlushAlarm = new Alarm(); private final Runnable myFlushDeferredRunnable = new Runnable() { public void run() { if (myProject.isDisposed()) return; flushDeferredText(); } }; private CompositeFilter myMessageFilter = new CompositeFilter(); public ConsoleViewImpl(final Project project) { super(new BorderLayout()); myPsiDisposedCheck = new DisposedPsiManagerCheck(project); myProject = project; addMessageFilter(new ExceptionFilter(myProject));//TEMP! } public void attachToProcess(final ProcessHandler processHandler){ myState = myState.attachTo(this, processHandler); } public void clear() { assertIsDispatchThread(); synchronized(LOCK){ myContentSize = 0; myDeferredOutput.setLength(0); myDeferredUserInput.setLength(0); if (myEditor != null){ ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { myHyperlinks.clear(); myEditor.getMarkupModel().removeAllHighlighters(); myTokens.clear(); CommandProcessor.getInstance().executeCommand(myProject, new Runnable() { public void run() { myEditor.getDocument().deleteString(0, myEditor.getDocument().getTextLength()); } }, null, null); } }); } } } public void scrollTo(final int offset) { assertIsDispatchThread(); if (myEditor == null) return; if (myState.isFinished() && !hasDeferredOutput()) { myEditor.getCaretModel().moveToOffset(offset); myEditor.getScrollingModel().scrollToCaret(ScrollType.MAKE_VISIBLE); } else myEditor.getScrollingModel().scrollTo(myEditor.offsetToLogicalPosition(offset), ScrollType.MAKE_VISIBLE); } private static void assertIsDispatchThread() { 
LOG.assertTrue(ApplicationManager.getApplication().isDispatchThread()); } public void setOutputPaused(final boolean value) { myOutputPaused = value; if (!value){ requestFlushImmediately(); } } public boolean isOutputPaused() { return myOutputPaused; } public boolean hasDeferredOutput() { synchronized(LOCK){ return myDeferredOutput.length() > 0; } } public void performWhenNoDeferredOutput(final Runnable runnable) { //Q: implement in another way without timer? if (!hasDeferredOutput()){ runnable.run(); } else{ new Alarm().addRequest( new Runnable() { public void run() { performWhenNoDeferredOutput(runnable); } }, 100 ); } } public JComponent getComponent() { if (myEditor == null){ myEditor = createEditor(); requestFlushImmediately(); add(myEditor.getComponent(), BorderLayout.CENTER); } return this; } public void dispose(){ myState = myState.dispose(); if (myEditor != null){ myFlushAlarm.cancelAllRequests(); EditorFactory.getInstance().releaseEditor(myEditor); synchronized (LOCK) { myDeferredOutput.setLength(0); } myEditor = null; } } public void print(String s, final ConsoleViewContentType contentType) { synchronized(LOCK){ s = StringUtil.convertLineSeparators(s, "\n"); myContentSize += s.length(); myDeferredOutput.append(s); if (contentType == ConsoleViewContentType.USER_INPUT){ myDeferredUserInput.append(s); } boolean needNew = true; if (!myTokens.isEmpty()){ final TokenInfo lastToken = myTokens.get(myTokens.size() - 1); if (lastToken.contentType == contentType){ lastToken.endOffset = myContentSize; // optimization needNew = false; } } if (needNew){ myTokens.add(new TokenInfo(contentType, myContentSize - s.length(), myContentSize)); } } if (s.indexOf('\n') >= 0 || s.indexOf('\r') >= 0){ if (contentType == ConsoleViewContentType.USER_INPUT){ flushDeferredUserInput(); } } final Runnable requestFlush = new Runnable() { public void run() { if (myFlushAlarm.getActiveRequestCount() == 0) { myFlushAlarm.addRequest(myFlushDeferredRunnable, FLUSH_DELAY); } } }; if 
(EventQueue.isDispatchThread()) requestFlush.run(); else SwingUtilities.invokeLater(requestFlush); } private void requestFlushImmediately() { myFlushAlarm.addRequest(new Runnable() { public void run() { flushDeferredText(); } }, 0); } public int getContentSize() { return myContentSize; } public boolean canPause() { return true; } private void flushDeferredText(){ LOG.assertTrue(ApplicationManager.getApplication().isDispatchThread()); synchronized(LOCK){ if (myOutputPaused) return; if (myDeferredOutput.length() == 0) return; } if (myEditor != null) { final String text = myDeferredOutput.substring(0, myDeferredOutput.length()); myDeferredOutput.setLength(0); final Document document = myEditor.getDocument(); final int oldLineCount = document.getLineCount(); final boolean isAtEndOfDocument = myEditor.getCaretModel().getOffset() == myEditor.getDocument().getTextLength(); ApplicationManager.getApplication().runWriteAction( new Runnable() { public void run() { CommandProcessor.getInstance().executeCommand(myProject, new Runnable() { public void run() { document.insertString(document.getTextLength(), text); } }, null, null); } } ); final int newLineCount = document.getLineCount(); if (oldLineCount < newLineCount){ myPsiDisposedCheck.performCheck(); highlightHyperlinks(oldLineCount - 1, newLineCount - 2); } if (isAtEndOfDocument) { myEditor.getCaretModel().moveToOffset(myEditor.getDocument().getTextLength()); myEditor.getSelectionModel().removeSelection(); myEditor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE); } } } private void flushDeferredUserInput() { if (myState.isRunning()){ synchronized(LOCK){ final String text = myDeferredUserInput.substring(0, myDeferredUserInput.length()); final int index = Math.max(text.lastIndexOf('\n'), text.lastIndexOf('\r')); if (index < 0) return; try{ myState.sendUserInput(text.substring(0, index + 1)); } catch(IOException e){ return; } myDeferredUserInput.setLength(0); myDeferredUserInput.append(text.substring(index + 1)); } } } 
public Object getData(final String dataId) { if (DataConstants.NAVIGATABLE.equals(dataId)){ if (myEditor == null) { return null; } final LogicalPosition pos = myEditor.getCaretModel().getLogicalPosition(); final HyperlinkInfo info = getHyperlinkInfoByLineAndCol(pos.line, pos.column); final OpenFileDescriptor openFileDescriptor = info instanceof OpenFileHyperlinkInfo ? ((OpenFileHyperlinkInfo)info).getDescriptor() : null; if (openFileDescriptor == null || !openFileDescriptor.getFile().isValid()) { return null; } return openFileDescriptor; } if (DataConstants.EDITOR.equals(dataId)) { return myEditor; } if (DataConstantsEx.HELP_ID.equals(dataId)) { return myHelpId; } return null; } public void setHelpId(final String helpId) { myHelpId = helpId; } public void addMessageFilter(final Filter filter) { myMessageFilter.addFilter(filter); } public void printHyperlink(final String hyperlinkText, final HyperlinkInfo info) { if (myEditor == null) return; print(hyperlinkText, ConsoleViewContentType.NORMAL_OUTPUT); flushDeferredText(); final int textLength = myEditor.getDocument().getTextLength(); addHyperlink(textLength - hyperlinkText.length(), textLength, null, info); } private Editor createEditor() { return ApplicationManager.getApplication().runReadAction(new Computable<Editor>() { public Editor compute() { final EditorFactory editorFactory = EditorFactory.getInstance(); final Document editorDocument = editorFactory.createDocument(""); final int bufferSize = GeneralSettings.getInstance().isUseCyclicBuffer() ? 
GeneralSettings.getInstance().getCyclicBufferSize() : 0; editorDocument.setCyclicBufferSize(bufferSize); final EditorEx editor = (EditorEx) editorFactory.createViewer(editorDocument,myProject); final EditorHighlighter highlighter = new MyHighghlighter(); editor.setHighlighter(highlighter); editor.putUserData(CONSOLE_VIEW_IN_EDITOR_VIEW, ConsoleViewImpl.this); final EditorSettings editorSettings = editor.getSettings(); editorSettings.setLineMarkerAreaShown(false); editorSettings.setLineNumbersShown(false); editorSettings.setFoldingOutlineShown(false); editorSettings.setAdditionalPageAtBottom(false); final EditorColorsScheme scheme = editor.getColorsScheme(); editor.setBackgroundColor(BACKGROUND_COLOR); scheme.setColor(EditorColors.CARET_ROW_COLOR, null); scheme.setColor(EditorColors.RIGHT_MARGIN_COLOR, null); editor.addEditorMouseListener(new EditorPopupHandler(){ public void invokePopup(final EditorMouseEvent event) { final MouseEvent mouseEvent = event.getMouseEvent(); popupInvoked(mouseEvent.getComponent(), mouseEvent.getX(), mouseEvent.getY()); } }); editor.addEditorMouseListener( new EditorMouseAdapter(){ public void mouseReleased(final EditorMouseEvent e){ final MouseEvent mouseEvent = e.getMouseEvent(); if (!mouseEvent.isPopupTrigger()){ navigate(e); } } } ); editor.getContentComponent().addMouseMotionListener( new MouseMotionAdapter(){ public void mouseMoved(final MouseEvent e){ final HyperlinkInfo info = getHyperlinkInfoByPoint(e.getPoint()); if (info != null){ editor.getContentComponent().setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR)); } else{ editor.getContentComponent().setCursor(Cursor.getPredefinedCursor(Cursor.TEXT_CURSOR)); } } } ); setEditorUpActions(editor); return editor; } }); } private static void setEditorUpActions(final Editor editor) { new EnterHandler().registerCustomShortcutSet(CommonShortcuts.ENTER, editor.getContentComponent()); registerActionHandler(editor, IdeActions.ACTION_EDITOR_PASTE, new PasteHandler()); 
registerActionHandler(editor, IdeActions.ACTION_EDITOR_BACKSPACE, new BackSpaceHandler()); } private static void registerActionHandler(final Editor editor, final String actionId, final AnAction action) { final Keymap keymap=KeymapManager.getInstance().getActiveKeymap(); final Shortcut[] shortcuts = keymap.getShortcuts(actionId); action.registerCustomShortcutSet(new CustomShortcutSet(shortcuts), editor.getContentComponent()); } private void popupInvoked(final Component component, final int x, final int y){ final DefaultActionGroup group = new DefaultActionGroup(); group.add(new ClearAllAction()); group.add(new CopyAction()); group.addSeparator(); final ActionManager actionManager = ActionManager.getInstance(); group.add(actionManager.getAction(DiffActions.COMPARE_WITH_CLIPBOARD)); final ActionPopupMenu menu = actionManager.createActionPopupMenu(ActionPlaces.UNKNOWN, group); menu.getComponent().show(component, x, y); } private void navigate(final EditorMouseEvent event){ if (event.getMouseEvent().isPopupTrigger()) return; final Point p = event.getMouseEvent().getPoint(); final HyperlinkInfo info = getHyperlinkInfoByPoint(p); if (info != null){ info.navigate(myProject); } } private HyperlinkInfo getHyperlinkInfoByPoint(final Point p){ final LogicalPosition pos = myEditor.xyToLogicalPosition(new Point(p.x, p.y)); return getHyperlinkInfoByLineAndCol(pos.line, pos.column); } private HyperlinkInfo getHyperlinkInfoByLineAndCol(final int line, final int col) { final int offset = myEditor.logicalPositionToOffset(new LogicalPosition(line, col)); return myHyperlinks.getHyperlinkAt(offset); } private void highlightHyperlinks(final int line1, final int line2){ if (myMessageFilter != null){ LOG.assertTrue(ApplicationManager.getApplication().isDispatchThread()); PsiDocumentManager.getInstance(myProject).commitAllDocuments(); final Document document = myEditor.getDocument(); final CharSequence chars = document.getCharsSequence(); for(int line = line1; line <= line2; line++){ if 
(line < 0) continue; final int startOffset = document.getLineStartOffset(line); int endOffset = document.getLineEndOffset(line); if (endOffset < document.getTextLength()){ endOffset++; // add '\n' } final String text = chars.subSequence(startOffset, endOffset).toString(); final Filter.Result result = myMessageFilter.applyFilter(text, endOffset); if (result != null){ final int highlightStartOffset = result.highlightStartOffset; final int highlightEndOffset = result.highlightEndOffset; final HyperlinkInfo hyperlinkInfo = result.hyperlinkInfo; addHyperlink(highlightStartOffset, highlightEndOffset, result.highlightAttributes, hyperlinkInfo); } } } } private void addHyperlink(final int highlightStartOffset, final int highlightEndOffset, final TextAttributes highlightAttributes, final HyperlinkInfo hyperlinkInfo) { TextAttributes textAttributes = highlightAttributes != null ? highlightAttributes : HYPERLINK_ATTRIBUTES; final RangeHighlighter highlighter = myEditor.getMarkupModel().addRangeHighlighter(highlightStartOffset, highlightEndOffset, HighlighterLayer.SELECTION - 1, textAttributes, HighlighterTargetArea.EXACT_RANGE); myHyperlinks.add(highlighter, hyperlinkInfo); } private class ClearAllAction extends AnAction{ public ClearAllAction(){ super("Clear All"); } public void actionPerformed(final AnActionEvent e){ clear(); } } private class CopyAction extends AnAction{ public CopyAction(){ super(myEditor.getSelectionModel().hasSelection() ? 
"Copy Selected Content" : "Copy Content"); } public void actionPerformed(final AnActionEvent e){ if (myEditor.getSelectionModel().hasSelection()){ myEditor.getSelectionModel().copySelectionToClipboard(); } else{ myEditor.getSelectionModel().setSelection(0, myEditor.getDocument().getTextLength()); myEditor.getSelectionModel().copySelectionToClipboard(); myEditor.getSelectionModel().removeSelection(); } } } private class MyHighghlighter extends DocumentAdapter implements EditorHighlighter { private boolean myHasEditor; public HighlighterIterator createIterator(final int startOffset) { final int startIndex = findTokenInfoByOffset(myTokens, startOffset); return new HighlighterIterator(){ private int myIndex = startIndex; public TextAttributes getTextAttributes() { return getTokenInfo().attributes; } public int getStart() { return getTokenInfo().startOffset; } public int getEnd() { return getTokenInfo().endOffset; } public IElementType getTokenType() { return null; } public void advance() { myIndex++; } public void retreat() { myIndex--; } public boolean atEnd() { return myIndex < 0 || myIndex >= myTokens.size(); } private TokenInfo getTokenInfo() { return myTokens.get(myIndex); } }; } public void setText(final CharSequence text) { } public void setEditor(final Editor editor) { LOG.assertTrue(!myHasEditor, "Highlighters cannot be reused with different editors"); myHasEditor = true; } public void setColorScheme(EditorColorsScheme scheme) { } } private static int findTokenInfoByOffset(final ArrayList<TokenInfo> tokens, final int offset) { int low = 0; int high = tokens.size() - 1; while(low <= high){ final int mid = (low + high) / 2; final TokenInfo midVal = tokens.get(mid); if (offset < midVal.startOffset){ high = mid - 1; } else if (offset >= midVal.endOffset){ low = mid + 1; } else{ return mid; } } return tokens.size(); } private static class MyTypedHandler implements TypedActionHandler { private TypedActionHandler myOriginalHandler; public MyTypedHandler(final 
TypedActionHandler originalAction) { myOriginalHandler = originalAction; } public void execute(final Editor editor, final char charTyped, final DataContext dataContext) { final ConsoleViewImpl consoleView = editor.getUserData(CONSOLE_VIEW_IN_EDITOR_VIEW); if (consoleView == null || !consoleView.myState.isRunning()){ myOriginalHandler.execute(editor, charTyped, dataContext); } else{ final String s = String.valueOf(charTyped); consoleView.print(s, ConsoleViewContentType.USER_INPUT); consoleView.flushDeferredText(); } } } private static final DataAccessor<ConsoleViewImpl> CONSOLE = new DataAccessor<ConsoleViewImpl>() { public ConsoleViewImpl getImpl(final DataContext dataContext) throws NoDataException { return EDITOR.getNotNull(dataContext).getUserData(CONSOLE_VIEW_IN_EDITOR_VIEW); } }; private static final Condition<ConsoleViewImpl> CONSOLE_IS_RUNNING = new Condition<ConsoleViewImpl>() { public boolean value(final ConsoleViewImpl consoleView) { return consoleView.myState.isRunning(); } }; private static final DataAccessor<ConsoleViewImpl> RUNNINT_CONSOLE =DataAccessor.createConditionalAccessor(CONSOLE, CONSOLE_IS_RUNNING); private static abstract class ConsoleAction extends AnAction { public void actionPerformed(final AnActionEvent e) { final ConsoleViewImpl console = RUNNINT_CONSOLE.from(e.getDataContext()); execute(console); } protected abstract void execute(ConsoleViewImpl console); public void update(final AnActionEvent e) { final ConsoleViewImpl console = RUNNINT_CONSOLE.from(e.getDataContext()); e.getPresentation().setEnabled(console != null); } } private static class EnterHandler extends ConsoleAction { public void execute(final ConsoleViewImpl consoleView) { consoleView.print("\n", ConsoleViewContentType.USER_INPUT); consoleView.flushDeferredText(); } } private static class PasteHandler extends ConsoleAction { public void execute(final ConsoleViewImpl consoleView) { final Transferable content = CopyPasteManager.getInstance().getContents(); if (content == 
null) return; String s = null; try { s = (String)content.getTransferData(DataFlavor.stringFlavor); } catch(Exception e) { consoleView.myEditor.getComponent().getToolkit().beep(); } if (s == null) return; consoleView.print(s, ConsoleViewContentType.USER_INPUT); consoleView.flushDeferredText(); } } private static class BackSpaceHandler extends ConsoleAction { public void execute(final ConsoleViewImpl consoleView) { final Editor editor = consoleView.myEditor; final Document document = editor.getDocument(); final int length = document.getTextLength(); if (length == 0) return; synchronized(consoleView.LOCK){ if (consoleView.myTokens.size() == 0) return; final TokenInfo info = consoleView.myTokens.get(consoleView.myTokens.size() - 1); if (info.contentType != ConsoleViewContentType.USER_INPUT) return; if (consoleView.myDeferredUserInput.length() == 0) return; consoleView.myDeferredUserInput.setLength(consoleView.myDeferredUserInput.length() - 1); info.endOffset -= 1; if (info.startOffset == info.endOffset){ consoleView.myTokens.remove(consoleView.myTokens.size() - 1); } consoleView.myContentSize--; } ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { document.deleteString(length - 1, length); editor.getCaretModel().moveToOffset(length - 1); editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE); editor.getSelectionModel().removeSelection(); } }); } } private static class Hyperlinks { private static final int NO_INDEX = Integer.MIN_VALUE; private final Map<RangeHighlighter,HyperlinkInfo> myHighlighterToMessageInfoMap = new HashMap<RangeHighlighter, HyperlinkInfo>(); private int myLastIndex = NO_INDEX; public void clear() { myHighlighterToMessageInfoMap.clear(); myLastIndex = NO_INDEX; } public HyperlinkInfo getHyperlinkAt(final int offset) { final Iterator<RangeHighlighter> iterator = myHighlighterToMessageInfoMap.keySet().iterator(); while(iterator.hasNext()){ final RangeHighlighter highlighter = iterator.next(); if 
(containsOffset(offset, highlighter)){ return myHighlighterToMessageInfoMap.get(highlighter); } } return null; } private static boolean containsOffset(final int offset, final RangeHighlighter highlighter) { return highlighter.getStartOffset() <= offset && offset <= highlighter.getEndOffset(); } public void add(final RangeHighlighter highlighter, final HyperlinkInfo hyperlinkInfo) { myHighlighterToMessageInfoMap.put(highlighter, hyperlinkInfo); if (myLastIndex != NO_INDEX && containsOffset(myLastIndex, highlighter)) myLastIndex = NO_INDEX; } } }
Cosmetics
source/com/intellij/execution/impl/ConsoleViewImpl.java
Cosmetics
Java
apache-2.0
fee4f8a772a24a670cd5e786add6fe2fb6ede26a
0
xfournet/intellij-community,ibinti/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,allotria/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,signed/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,semonte/intellij-community,asedunov/intellij-community,allotria/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,xfournet/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,apixandru/intellij-community,da1z/intellij-community,FHannes/intellij-community,allotria/intellij-community,da1z/intellij-community,allotria/intellij-community,asedunov/intellij-community,FHannes/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,vvv1559/intellij-community,semonte/intellij-community,asedunov/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,asedunov/intellij-community,allotria/intellij-community,signed/intellij-community,FHannes/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,signed/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,ibinti/intellij-community,semonte/intellij-comm
unity,vvv1559/intellij-community,apixandru/intellij-community,allotria/intellij-community,FHannes/intellij-community,ibinti/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,semonte/intellij-community,apixandru/intellij-community,da1z/intellij-community,apixandru/intellij-community,asedunov/intellij-community,xfournet/intellij-community,ibinti/intellij-community,allotria/intellij-community,asedunov/intellij-community,apixandru/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,signed/intellij-community,apixandru/intellij-community,semonte/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,da1z/intellij-community,xfournet/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,signed/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,ibinti/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,da1z/intellij-community,ibinti/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,asedunov/intellij-community,xfournet/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,asedunov/intellij-community,signed/intellij-community,xfournet/intellij-community,signed/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,signed/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,allotria/intellij-community,da1z/intellij-community,da1z/intellij-community,signed/intellij-community,suncycheng/intellij-community,signed/intel
lij-community,da1z/intellij-community,semonte/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,semonte/intellij-community,xfournet/intellij-community,FHannes/intellij-community,FHannes/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,asedunov/intellij-community,apixandru/intellij-community,xfournet/intellij-community,semonte/intellij-community,signed/intellij-community,xfournet/intellij-community,asedunov/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,FHannes/intellij-community
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.compiler.chainsSearch.completion.lookup; import com.intellij.codeInsight.completion.InsertionContext; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.codeInsight.lookup.LookupElementDecorator; import com.intellij.codeInsight.lookup.LookupElementPresentation; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.RangeMarker; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Ref; import com.intellij.psi.*; import com.intellij.psi.codeStyle.JavaCodeStyleManager; import com.intellij.psi.codeStyle.SuggestedNameInfo; import com.intellij.psi.codeStyle.VariableKind; import com.intellij.psi.util.PsiTreeUtil; import org.jetbrains.annotations.NotNull; import java.util.Collection; /** * @author Dmitry Batkovich <[email protected]> */ public class ChainCompletionNewVariableLookupElement extends LookupElementDecorator<LookupElement> { private final static Logger LOG = Logger.getInstance(ChainCompletionNewVariableLookupElement.class); private final PsiClass myPsiClass; private final String myNewVarName; public ChainCompletionNewVariableLookupElement(final PsiClass psiClass, final String newVarName, final LookupElement calledMethods) { super(calledMethods); myNewVarName = newVarName; myPsiClass = psiClass; } public static ChainCompletionNewVariableLookupElement create(final PsiClass psiClass, final 
LookupElement calledMethods) { final Project project = psiClass.getProject(); final String newVarName = chooseLongestName(JavaCodeStyleManager.getInstance(project). suggestVariableName(VariableKind.LOCAL_VARIABLE, null, null, JavaPsiFacade.getElementFactory(project).createType(psiClass))); return new ChainCompletionNewVariableLookupElement(psiClass, newVarName, calledMethods); } @Override public void handleInsert(final InsertionContext context) { final RangeMarker rangeMarker = context.getDocument().createRangeMarker(context.getStartOffset(), context.getStartOffset()); getDelegate().handleInsert(context); final PsiFile file = context.getFile(); ((PsiJavaFile)file).importClass(myPsiClass); final PsiElement caretElement = file.findElementAt(context.getEditor().getCaretModel().getOffset()); if (caretElement == null) { LOG.error("element on caret position MUST BE not null"); return; } PsiElement prevSibling = caretElement.getPrevSibling(); final PsiStatement statement; if (prevSibling instanceof PsiStatement) { statement = (PsiStatement)prevSibling; } else { statement = PsiTreeUtil.getParentOfType(prevSibling, PsiStatement.class); } final PsiCodeBlock codeBlock = PsiTreeUtil.getParentOfType(statement, PsiCodeBlock.class); if (codeBlock == null) { LOG.error("code block MUST BE not null"); return; } final Project project = context.getProject(); final Ref<PsiElement> insertedStatementRef = Ref.create(); final PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(project); context.commitDocument(); final PsiStatement statementFromText = elementFactory.createStatementFromText(String.format("%s %s = null;", myPsiClass.getName(), myNewVarName), null); insertedStatementRef.set(codeBlock.addBefore(statementFromText, statement)); final PsiLiteralExpression nullKeyword = findNull(insertedStatementRef.get()); PsiDocumentManager.getInstance(context.getProject()).doPostponedOperationsAndUnblockDocument(context.getDocument()); 
context.getDocument().insertString(rangeMarker.getStartOffset(), myNewVarName + "."); context.commitDocument(); final int offset = nullKeyword.getTextOffset(); final int endOffset = offset + nullKeyword.getTextLength(); context.getEditor().getSelectionModel().setSelection(offset, endOffset); context.getEditor().getCaretModel().moveToOffset(offset); } @NotNull @Override public String getLookupString() { return getDelegate().getLookupString(); } @Override public void renderElement(final LookupElementPresentation presentation) { super.renderElement(presentation); presentation.setItemText(myNewVarName + "." + presentation.getItemText()); } private static PsiLiteralExpression findNull(final PsiElement psiElement) { final Collection<PsiLiteralExpression> literalExpressions = PsiTreeUtil.findChildrenOfType(psiElement, PsiLiteralExpression.class); for (final PsiLiteralExpression literalExpression : literalExpressions) { if (PsiKeyword.NULL.equals(literalExpression.getText())) { return literalExpression; } } throw new IllegalArgumentException(); } private static String chooseLongestName(final SuggestedNameInfo suggestedNameInfo) { final String[] names = suggestedNameInfo.names; String longestWord = names[0]; int maxLength = longestWord.length(); for (int i = 1; i < names.length; i++) { final int length = names[i].length(); if (length > maxLength) { maxLength = length; longestWord = names[i]; } } return longestWord; } }
java/compiler/impl/src/com/intellij/compiler/chainsSearch/completion/lookup/ChainCompletionNewVariableLookupElement.java
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.compiler.chainsSearch.completion.lookup; import com.intellij.codeInsight.completion.InsertionContext; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.codeInsight.lookup.LookupElementDecorator; import com.intellij.codeInsight.lookup.LookupElementPresentation; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.RangeMarker; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Ref; import com.intellij.psi.*; import com.intellij.psi.codeStyle.JavaCodeStyleManager; import com.intellij.psi.codeStyle.SuggestedNameInfo; import com.intellij.psi.codeStyle.VariableKind; import com.intellij.psi.util.PsiTreeUtil; import org.jetbrains.annotations.NotNull; import java.util.Collection; /** * @author Dmitry Batkovich <[email protected]> */ public class ChainCompletionNewVariableLookupElement extends LookupElementDecorator<LookupElement> { private final static Logger LOG = Logger.getInstance(ChainCompletionNewVariableLookupElement.class); private final PsiClass myPsiClass; private final String myNewVarName; public ChainCompletionNewVariableLookupElement(final PsiClass psiClass, final String newVarName, final LookupElement calledMethods) { super(calledMethods); myNewVarName = newVarName; myPsiClass = psiClass; } public static ChainCompletionNewVariableLookupElement create(final PsiClass psiClass, final 
LookupElement calledMethods) { final Project project = psiClass.getProject(); final String newVarName = chooseLongestName(JavaCodeStyleManager.getInstance(project). suggestVariableName(VariableKind.LOCAL_VARIABLE, null, null, JavaPsiFacade.getElementFactory(project).createType(psiClass))); return new ChainCompletionNewVariableLookupElement(psiClass, newVarName, calledMethods); } @Override public void handleInsert(final InsertionContext context) { final RangeMarker rangeMarker = context.getDocument().createRangeMarker(context.getStartOffset(), context.getStartOffset()); getDelegate().handleInsert(context); final PsiFile file = context.getFile(); ((PsiJavaFile)file).importClass(myPsiClass); final PsiElement caretElement = file.findElementAt(context.getEditor().getCaretModel().getOffset()); if (caretElement == null) { LOG.error("element on caret position MUST BE not null"); return; } final PsiStatement statement = (PsiStatement) caretElement.getPrevSibling(); final PsiCodeBlock codeBlock = PsiTreeUtil.getParentOfType(statement, PsiCodeBlock.class); if (codeBlock == null) { LOG.error("code block MUST BE not null"); return; } final Project project = context.getProject(); final Ref<PsiElement> insertedStatementRef = Ref.create(); final PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(project); context.commitDocument(); final PsiStatement statementFromText = elementFactory.createStatementFromText(String.format("%s %s = null;", myPsiClass.getName(), myNewVarName), null); insertedStatementRef.set(codeBlock.addBefore(statementFromText, statement)); final PsiLiteralExpression nullKeyword = findNull(insertedStatementRef.get()); PsiDocumentManager.getInstance(context.getProject()).doPostponedOperationsAndUnblockDocument(context.getDocument()); context.getDocument().insertString(rangeMarker.getStartOffset(), myNewVarName + "."); context.commitDocument(); final int offset = nullKeyword.getTextOffset(); final int endOffset = offset + nullKeyword.getTextLength(); 
context.getEditor().getSelectionModel().setSelection(offset, endOffset); context.getEditor().getCaretModel().moveToOffset(offset); } @NotNull @Override public String getLookupString() { return getDelegate().getLookupString(); } @Override public void renderElement(final LookupElementPresentation presentation) { super.renderElement(presentation); presentation.setItemText(myNewVarName + "." + presentation.getItemText()); } private static PsiLiteralExpression findNull(final PsiElement psiElement) { final Collection<PsiLiteralExpression> literalExpressions = PsiTreeUtil.findChildrenOfType(psiElement, PsiLiteralExpression.class); for (final PsiLiteralExpression literalExpression : literalExpressions) { if (PsiKeyword.NULL.equals(literalExpression.getText())) { return literalExpression; } } throw new IllegalArgumentException(); } private static String chooseLongestName(final SuggestedNameInfo suggestedNameInfo) { final String[] names = suggestedNameInfo.names; String longestWord = names[0]; int maxLength = longestWord.length(); for (int i = 1; i < names.length; i++) { final int length = names[i].length(); if (length > maxLength) { maxLength = length; longestWord = names[i]; } } return longestWord; } }
fix completion insertion
java/compiler/impl/src/com/intellij/compiler/chainsSearch/completion/lookup/ChainCompletionNewVariableLookupElement.java
fix completion insertion
Java
apache-2.0
bb394d5e10d46fff6a1bf78622fcd570d7f757fc
0
xfournet/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,da1z/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,ibinti/intellij-community,xfournet/intellij-community,xfournet/intellij-community,ibinti/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,asedunov/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,apixandru/intellij-community,da1z/intellij-community,xfournet/intellij-community,ibinti/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,ibinti/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,allotria/intellij-community,allotria/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,asedunov/intellij-community,xfournet/intellij-community,da1z/intellij-community,allotria/intellij-community,allotria/intellij-community,da1z/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,asedunov/intellij-community,ibinti/intellij-community,ibinti/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,suncycheng
/intellij-community,xfournet/intellij-community,xfournet/intellij-community,da1z/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,allotria/intellij-community,ibinti/intellij-community,apixandru/intellij-community,asedunov/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,asedunov/intellij-community,allotria/intellij-community,asedunov/intellij-community,xfournet/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,da1z/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,da1z/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,xfournet/intellij-community
/* * Copyright (c) 2007-2009, Osmorc Development Team * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this list * of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, this * list of conditions and the following disclaimer in the documentation and/or other * materials provided with the distribution. * * Neither the name of 'Osmorc Development Team' nor the names of its contributors may be * used to endorse or promote products derived from this software without specific * prior written permission. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL * THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.jetbrains.lang.manifest.header.impl; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.lang.manifest.header.HeaderParser; import org.jetbrains.lang.manifest.header.HeaderParserProvider; import java.util.Map; /** * @author Robert F. 
Beeger ([email protected]) */ public class StandardManifestHeaderParsers implements HeaderParserProvider { private final Map<String, HeaderParser> myParsers; public StandardManifestHeaderParsers() { myParsers = ContainerUtil.newHashMap(); myParsers.put("Manifest-Version", StandardHeaderParser.INSTANCE); myParsers.put("Created-By", StandardHeaderParser.INSTANCE); myParsers.put("Signature-Version", StandardHeaderParser.INSTANCE); myParsers.put("Class-Path", StandardHeaderParser.INSTANCE); myParsers.put(ClassReferenceParser.MAIN_CLASS, ClassReferenceParser.INSTANCE); myParsers.put("Implementation-Title", StandardHeaderParser.INSTANCE); myParsers.put("Implementation-Version", StandardHeaderParser.INSTANCE); myParsers.put("Implementation-Vendor", StandardHeaderParser.INSTANCE); myParsers.put("Implementation-Vendor-Id", StandardHeaderParser.INSTANCE); myParsers.put("Implementation-URL", StandardHeaderParser.INSTANCE); myParsers.put("Specification-Title", StandardHeaderParser.INSTANCE); myParsers.put("Specification-Version", StandardHeaderParser.INSTANCE); myParsers.put("Specification-Vendor", StandardHeaderParser.INSTANCE); myParsers.put("Sealed", StandardHeaderParser.INSTANCE); myParsers.put("Name", StandardHeaderParser.INSTANCE); myParsers.put("Content-Type", StandardHeaderParser.INSTANCE); myParsers.put("Java-Bean", StandardHeaderParser.INSTANCE); myParsers.put("MD5-Digest", StandardHeaderParser.INSTANCE); myParsers.put("SHA-Digest", StandardHeaderParser.INSTANCE); myParsers.put("Magic", StandardHeaderParser.INSTANCE); myParsers.put(ClassReferenceParser.PREMAIN_CLASS, ClassReferenceParser.INSTANCE); myParsers.put(ClassReferenceParser.AGENT_CLASS, ClassReferenceParser.INSTANCE); myParsers.put("Boot-Class-Path", StandardHeaderParser.INSTANCE); myParsers.put("Can-Redefine-Classes", StandardHeaderParser.INSTANCE); myParsers.put("Can-Retransform-Classes", StandardHeaderParser.INSTANCE); myParsers.put("Can-Set-Native-Method-Prefix", StandardHeaderParser.INSTANCE); 
myParsers.put("Automatic-Module-Name", StandardHeaderParser.INSTANCE); myParsers.put("Multi-Release", StandardHeaderParser.INSTANCE); } @NotNull @Override public Map<String, HeaderParser> getHeaderParsers() { return myParsers; } }
java/manifest/src/org/jetbrains/lang/manifest/header/impl/StandardManifestHeaderParsers.java
/* * Copyright (c) 2007-2009, Osmorc Development Team * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this list * of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, this * list of conditions and the following disclaimer in the documentation and/or other * materials provided with the distribution. * * Neither the name of 'Osmorc Development Team' nor the names of its contributors may be * used to endorse or promote products derived from this software without specific * prior written permission. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL * THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.jetbrains.lang.manifest.header.impl; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.lang.manifest.header.HeaderParser; import org.jetbrains.lang.manifest.header.HeaderParserProvider; import java.util.Map; /** * @author Robert F. 
Beeger ([email protected]) */ public class StandardManifestHeaderParsers implements HeaderParserProvider { private final Map<String, HeaderParser> myParsers; public StandardManifestHeaderParsers() { myParsers = ContainerUtil.newHashMap(); myParsers.put("Manifest-Version", StandardHeaderParser.INSTANCE); myParsers.put("Created-By", StandardHeaderParser.INSTANCE); myParsers.put("Signature-Version", StandardHeaderParser.INSTANCE); myParsers.put("Class-Path", StandardHeaderParser.INSTANCE); myParsers.put(ClassReferenceParser.MAIN_CLASS, ClassReferenceParser.INSTANCE); myParsers.put("Implementation-Title", StandardHeaderParser.INSTANCE); myParsers.put("Implementation-Version", StandardHeaderParser.INSTANCE); myParsers.put("Implementation-Vendor", StandardHeaderParser.INSTANCE); myParsers.put("Implementation-Vendor-Id", StandardHeaderParser.INSTANCE); myParsers.put("Implementation-URL", StandardHeaderParser.INSTANCE); myParsers.put("Specification-Title", StandardHeaderParser.INSTANCE); myParsers.put("Specification-Version", StandardHeaderParser.INSTANCE); myParsers.put("Specification-Vendor", StandardHeaderParser.INSTANCE); myParsers.put("Sealed", StandardHeaderParser.INSTANCE); myParsers.put("Name", StandardHeaderParser.INSTANCE); myParsers.put("Content-Type", StandardHeaderParser.INSTANCE); myParsers.put("Java-Bean", StandardHeaderParser.INSTANCE); myParsers.put("MD5-Digest", StandardHeaderParser.INSTANCE); myParsers.put("SHA-Digest", StandardHeaderParser.INSTANCE); myParsers.put("Magic", StandardHeaderParser.INSTANCE); myParsers.put(ClassReferenceParser.PREMAIN_CLASS, ClassReferenceParser.INSTANCE); myParsers.put(ClassReferenceParser.AGENT_CLASS, ClassReferenceParser.INSTANCE); myParsers.put("Boot-Class-Path", StandardHeaderParser.INSTANCE); myParsers.put("Can-Redefine-Classes", StandardHeaderParser.INSTANCE); myParsers.put("Can-Retransform-Classes", StandardHeaderParser.INSTANCE); myParsers.put("Can-Set-Native-Method-Prefix", StandardHeaderParser.INSTANCE); } 
@NotNull @Override public Map<String, HeaderParser> getHeaderParsers() { return myParsers; } }
[java] recognizes "Automatic-Module-Name" and "Multi-Release" in manifests
java/manifest/src/org/jetbrains/lang/manifest/header/impl/StandardManifestHeaderParsers.java
[java] recognizes "Automatic-Module-Name" and "Multi-Release" in manifests
Java
apache-2.0
2cb96f6e6d7ad160e73eb0ee72ea511dce32f802
0
titusfortner/selenium,jabbrwcky/selenium,lmtierney/selenium,rovner/selenium,carsonmcdonald/selenium,tbeadle/selenium,TikhomirovSergey/selenium,Dude-X/selenium,customcommander/selenium,gorlemik/selenium,zenefits/selenium,tkurnosova/selenium,sag-enorman/selenium,carlosroh/selenium,sankha93/selenium,bayandin/selenium,minhthuanit/selenium,valfirst/selenium,BlackSmith/selenium,JosephCastro/selenium,dimacus/selenium,rovner/selenium,minhthuanit/selenium,gotcha/selenium,doungni/selenium,krosenvold/selenium,freynaud/selenium,Tom-Trumper/selenium,5hawnknight/selenium,tkurnosova/selenium,gabrielsimas/selenium,SouWilliams/selenium,o-schneider/selenium,gotcha/selenium,Sravyaksr/selenium,jsarenik/jajomojo-selenium,s2oBCN/selenium,petruc/selenium,sri85/selenium,blueyed/selenium,dbo/selenium,chrsmithdemos/selenium,i17c/selenium,thanhpete/selenium,Dude-X/selenium,actmd/selenium,soundcloud/selenium,clavery/selenium,bmannix/selenium,Herst/selenium,MCGallaspy/selenium,blackboarddd/selenium,tkurnosova/selenium,vinay-qa/vinayit-android-server-apk,kalyanjvn1/selenium,Jarob22/selenium,xsyntrex/selenium,MeetMe/selenium,isaksky/selenium,bartolkaruza/selenium,valfirst/selenium,amikey/selenium,mach6/selenium,asashour/selenium,gorlemik/selenium,wambat/selenium,lmtierney/selenium,stupidnetizen/selenium,freynaud/selenium,orange-tv-blagnac/selenium,asolntsev/selenium,amikey/selenium,knorrium/selenium,twalpole/selenium,JosephCastro/selenium,misttechnologies/selenium,jerome-jacob/selenium,petruc/selenium,isaksky/selenium,zenefits/selenium,telefonicaid/selenium,dimacus/selenium,lmtierney/selenium,MeetMe/selenium,5hawnknight/selenium,gregerrag/selenium,oddui/selenium,petruc/selenium,HtmlUnit/selenium,manuelpirez/selenium,orange-tv-blagnac/selenium,yukaReal/selenium,anshumanchatterji/selenium,joshmgrant/selenium,MCGallaspy/selenium,SouWilliams/selenium,amikey/selenium,Tom-Trumper/selenium,freynaud/selenium,lukeis/selenium,TheBlackTuxCorp/selenium,lrowe/selenium,asolntsev/selenium,lmtierney/selenium,App
dynamics/selenium,Dude-X/selenium,dcjohnson1989/selenium,soundcloud/selenium,Tom-Trumper/selenium,alexec/selenium,SevInf/IEDriver,mojwang/selenium,blueyed/selenium,wambat/selenium,rovner/selenium,onedox/selenium,sebady/selenium,SeleniumHQ/selenium,bmannix/selenium,dcjohnson1989/selenium,dimacus/selenium,telefonicaid/selenium,temyers/selenium,xsyntrex/selenium,actmd/selenium,vveliev/selenium,JosephCastro/selenium,minhthuanit/selenium,dbo/selenium,bartolkaruza/selenium,quoideneuf/selenium,SevInf/IEDriver,o-schneider/selenium,HtmlUnit/selenium,chrisblock/selenium,joshuaduffy/selenium,rrussell39/selenium,i17c/selenium,quoideneuf/selenium,rrussell39/selenium,MeetMe/selenium,rrussell39/selenium,sebady/selenium,orange-tv-blagnac/selenium,sevaseva/selenium,meksh/selenium,alb-i986/selenium,mach6/selenium,gotcha/selenium,petruc/selenium,Tom-Trumper/selenium,yukaReal/selenium,davehunt/selenium,krosenvold/selenium,RamaraoDonta/ramarao-clone,tarlabs/selenium,AutomatedTester/selenium,dibagga/selenium,AutomatedTester/selenium,Appdynamics/selenium,markodolancic/selenium,asolntsev/selenium,SeleniumHQ/selenium,sankha93/selenium,clavery/selenium,mach6/selenium,quoideneuf/selenium,HtmlUnit/selenium,Jarob22/selenium,i17c/selenium,compstak/selenium,clavery/selenium,joshmgrant/selenium,anshumanchatterji/selenium,vveliev/selenium,compstak/selenium,blackboarddd/selenium,gotcha/selenium,chrisblock/selenium,thanhpete/selenium,mojwang/selenium,thanhpete/selenium,o-schneider/selenium,chrisblock/selenium,aluedeke/chromedriver,mach6/selenium,Sravyaksr/selenium,tarlabs/selenium,krmahadevan/selenium,MCGallaspy/selenium,sankha93/selenium,gorlemik/selenium,alexec/selenium,jsarenik/jajomojo-selenium,rovner/selenium,aluedeke/chromedriver,vveliev/selenium,juangj/selenium,krosenvold/selenium,slongwang/selenium,vinay-qa/vinayit-android-server-apk,lilredindy/selenium,davehunt/selenium,valfirst/selenium,Dude-X/selenium,isaksky/selenium,titusfortner/selenium,skurochkin/selenium,DrMarcII/selenium,soundcloud/s
elenium,5hawnknight/selenium,MeetMe/selenium,GorK-ChO/selenium,anshumanchatterji/selenium,doungni/selenium,titusfortner/selenium,customcommander/selenium,lmtierney/selenium,AutomatedTester/selenium,wambat/selenium,vinay-qa/vinayit-android-server-apk,amikey/selenium,knorrium/selenium,eric-stanley/selenium,mach6/selenium,lrowe/selenium,alb-i986/selenium,RamaraoDonta/ramarao-clone,SeleniumHQ/selenium,telefonicaid/selenium,rrussell39/selenium,orange-tv-blagnac/selenium,freynaud/selenium,TheBlackTuxCorp/selenium,dibagga/selenium,meksh/selenium,bartolkaruza/selenium,jabbrwcky/selenium,misttechnologies/selenium,stupidnetizen/selenium,arunsingh/selenium,Appdynamics/selenium,mestihudson/selenium,BlackSmith/selenium,jknguyen/josephknguyen-selenium,actmd/selenium,MeetMe/selenium,gabrielsimas/selenium,gabrielsimas/selenium,jsakamoto/selenium,tkurnosova/selenium,bayandin/selenium,clavery/selenium,SouWilliams/selenium,bayandin/selenium,tbeadle/selenium,wambat/selenium,valfirst/selenium,twalpole/selenium,xmhubj/selenium,xsyntrex/selenium,sevaseva/selenium,vveliev/selenium,lilredindy/selenium,Dude-X/selenium,Appdynamics/selenium,carsonmcdonald/selenium,MeetMe/selenium,jsakamoto/selenium,asolntsev/selenium,stupidnetizen/selenium,wambat/selenium,gurayinan/selenium,minhthuanit/selenium,pulkitsinghal/selenium,freynaud/selenium,alexec/selenium,customcommander/selenium,actmd/selenium,rrussell39/selenium,gregerrag/selenium,uchida/selenium,arunsingh/selenium,blackboarddd/selenium,wambat/selenium,dbo/selenium,joshmgrant/selenium,vinay-qa/vinayit-android-server-apk,soundcloud/selenium,skurochkin/selenium,anshumanchatterji/selenium,RamaraoDonta/ramarao-clone,chrisblock/selenium,soundcloud/selenium,customcommander/selenium,o-schneider/selenium,vveliev/selenium,sankha93/selenium,krosenvold/selenium,markodolancic/selenium,livioc/selenium,slongwang/selenium,kalyanjvn1/selenium,gorlemik/selenium,amar-sharma/selenium,TheBlackTuxCorp/selenium,SevInf/IEDriver,sebady/selenium,jsakamoto/selenium,houchj
/selenium,gorlemik/selenium,alb-i986/selenium,chrsmithdemos/selenium,thanhpete/selenium,mojwang/selenium,manuelpirez/selenium,gotcha/selenium,s2oBCN/selenium,jknguyen/josephknguyen-selenium,xsyntrex/selenium,blackboarddd/selenium,dcjohnson1989/selenium,compstak/selenium,dbo/selenium,rovner/selenium,jabbrwcky/selenium,asashour/selenium,slongwang/selenium,sevaseva/selenium,juangj/selenium,BlackSmith/selenium,Sravyaksr/selenium,MeetMe/selenium,blueyed/selenium,tkurnosova/selenium,jerome-jacob/selenium,misttechnologies/selenium,kalyanjvn1/selenium,chrisblock/selenium,arunsingh/selenium,zenefits/selenium,juangj/selenium,carlosroh/selenium,uchida/selenium,kalyanjvn1/selenium,doungni/selenium,DrMarcII/selenium,SouWilliams/selenium,dandv/selenium,gotcha/selenium,mach6/selenium,sag-enorman/selenium,DrMarcII/selenium,chrisblock/selenium,onedox/selenium,joshbruning/selenium,TheBlackTuxCorp/selenium,dandv/selenium,bartolkaruza/selenium,arunsingh/selenium,asolntsev/selenium,jabbrwcky/selenium,rplevka/selenium,onedox/selenium,mojwang/selenium,mojwang/selenium,TikhomirovSergey/selenium,clavery/selenium,slongwang/selenium,markodolancic/selenium,lukeis/selenium,anshumanchatterji/selenium,houchj/selenium,BlackSmith/selenium,gorlemik/selenium,amar-sharma/selenium,bartolkaruza/selenium,freynaud/selenium,customcommander/selenium,freynaud/selenium,joshbruning/selenium,slongwang/selenium,jsarenik/jajomojo-selenium,davehunt/selenium,dbo/selenium,bartolkaruza/selenium,lilredindy/selenium,davehunt/selenium,tkurnosova/selenium,livioc/selenium,stupidnetizen/selenium,vinay-qa/vinayit-android-server-apk,joshbruning/selenium,orange-tv-blagnac/selenium,jsakamoto/selenium,juangj/selenium,mestihudson/selenium,alb-i986/selenium,mestihudson/selenium,joshbruning/selenium,AutomatedTester/selenium,Dude-X/selenium,chrsmithdemos/selenium,lukeis/selenium,gregerrag/selenium,vinay-qa/vinayit-android-server-apk,Jarob22/selenium,Tom-Trumper/selenium,gurayinan/selenium,gregerrag/selenium,o-schneider/selenium,quo
ideneuf/selenium,sri85/selenium,TikhomirovSergey/selenium,sag-enorman/selenium,joshuaduffy/selenium,carlosroh/selenium,minhthuanit/selenium,asashour/selenium,freynaud/selenium,manuelpirez/selenium,mojwang/selenium,xmhubj/selenium,livioc/selenium,minhthuanit/selenium,JosephCastro/selenium,meksh/selenium,carsonmcdonald/selenium,alexec/selenium,dkentw/selenium,SevInf/IEDriver,valfirst/selenium,vinay-qa/vinayit-android-server-apk,BlackSmith/selenium,asashour/selenium,pulkitsinghal/selenium,stupidnetizen/selenium,xmhubj/selenium,vveliev/selenium,dibagga/selenium,zenefits/selenium,lrowe/selenium,HtmlUnit/selenium,dbo/selenium,gregerrag/selenium,jerome-jacob/selenium,mach6/selenium,meksh/selenium,bmannix/selenium,MCGallaspy/selenium,isaksky/selenium,livioc/selenium,gabrielsimas/selenium,5hawnknight/selenium,5hawnknight/selenium,soundcloud/selenium,AutomatedTester/selenium,gabrielsimas/selenium,i17c/selenium,valfirst/selenium,lrowe/selenium,dandv/selenium,krmahadevan/selenium,Ardesco/selenium,gurayinan/selenium,actmd/selenium,lrowe/selenium,SeleniumHQ/selenium,krmahadevan/selenium,compstak/selenium,eric-stanley/selenium,chrsmithdemos/selenium,o-schneider/selenium,slongwang/selenium,DrMarcII/selenium,sevaseva/selenium,s2oBCN/selenium,jknguyen/josephknguyen-selenium,skurochkin/selenium,houchj/selenium,markodolancic/selenium,tbeadle/selenium,dcjohnson1989/selenium,dandv/selenium,livioc/selenium,GorK-ChO/selenium,sri85/selenium,joshmgrant/selenium,dbo/selenium,TheBlackTuxCorp/selenium,RamaraoDonta/ramarao-clone,joshuaduffy/selenium,actmd/selenium,isaksky/selenium,minhthuanit/selenium,sag-enorman/selenium,twalpole/selenium,temyers/selenium,orange-tv-blagnac/selenium,o-schneider/selenium,p0deje/selenium,p0deje/selenium,AutomatedTester/selenium,joshuaduffy/selenium,joshmgrant/selenium,pulkitsinghal/selenium,pulkitsinghal/selenium,gemini-testing/selenium,sevaseva/selenium,Tom-Trumper/selenium,aluedeke/chromedriver,jerome-jacob/selenium,JosephCastro/selenium,knorrium/selenium,blackb
oarddd/selenium,i17c/selenium,mestihudson/selenium,markodolancic/selenium,tarlabs/selenium,mestihudson/selenium,SevInf/IEDriver,aluedeke/chromedriver,pulkitsinghal/selenium,valfirst/selenium,onedox/selenium,gregerrag/selenium,soundcloud/selenium,customcommander/selenium,gotcha/selenium,anshumanchatterji/selenium,titusfortner/selenium,TikhomirovSergey/selenium,p0deje/selenium,carsonmcdonald/selenium,SouWilliams/selenium,davehunt/selenium,SouWilliams/selenium,jerome-jacob/selenium,onedox/selenium,telefonicaid/selenium,markodolancic/selenium,Ardesco/selenium,s2oBCN/selenium,gotcha/selenium,yukaReal/selenium,dibagga/selenium,gabrielsimas/selenium,jsakamoto/selenium,petruc/selenium,customcommander/selenium,mestihudson/selenium,mestihudson/selenium,actmd/selenium,TikhomirovSergey/selenium,aluedeke/chromedriver,5hawnknight/selenium,eric-stanley/selenium,jsarenik/jajomojo-selenium,asashour/selenium,doungni/selenium,isaksky/selenium,tbeadle/selenium,bartolkaruza/selenium,blueyed/selenium,davehunt/selenium,knorrium/selenium,asolntsev/selenium,chrsmithdemos/selenium,sag-enorman/selenium,eric-stanley/selenium,carlosroh/selenium,temyers/selenium,quoideneuf/selenium,Appdynamics/selenium,Herst/selenium,Dude-X/selenium,xsyntrex/selenium,slongwang/selenium,bayandin/selenium,s2oBCN/selenium,gemini-testing/selenium,livioc/selenium,aluedeke/chromedriver,amar-sharma/selenium,lmtierney/selenium,compstak/selenium,valfirst/selenium,Jarob22/selenium,SeleniumHQ/selenium,uchida/selenium,titusfortner/selenium,wambat/selenium,actmd/selenium,kalyanjvn1/selenium,MeetMe/selenium,Sravyaksr/selenium,orange-tv-blagnac/selenium,gemini-testing/selenium,blackboarddd/selenium,lilredindy/selenium,chrisblock/selenium,jabbrwcky/selenium,temyers/selenium,SeleniumHQ/selenium,krosenvold/selenium,AutomatedTester/selenium,jerome-jacob/selenium,houchj/selenium,livioc/selenium,gemini-testing/selenium,Herst/selenium,rplevka/selenium,MCGallaspy/selenium,amar-sharma/selenium,krmahadevan/selenium,blackboarddd/selenium
,meksh/selenium,petruc/selenium,i17c/selenium,soundcloud/selenium,yukaReal/selenium,sevaseva/selenium,manuelpirez/selenium,chrisblock/selenium,i17c/selenium,thanhpete/selenium,houchj/selenium,dcjohnson1989/selenium,stupidnetizen/selenium,doungni/selenium,chrisblock/selenium,oddui/selenium,quoideneuf/selenium,mestihudson/selenium,DrMarcII/selenium,dbo/selenium,stupidnetizen/selenium,lukeis/selenium,titusfortner/selenium,uchida/selenium,joshbruning/selenium,yukaReal/selenium,alexec/selenium,sankha93/selenium,lilredindy/selenium,isaksky/selenium,sankha93/selenium,p0deje/selenium,carlosroh/selenium,dibagga/selenium,DrMarcII/selenium,quoideneuf/selenium,thanhpete/selenium,juangj/selenium,SouWilliams/selenium,TheBlackTuxCorp/selenium,sag-enorman/selenium,livioc/selenium,skurochkin/selenium,stupidnetizen/selenium,joshmgrant/selenium,knorrium/selenium,lilredindy/selenium,GorK-ChO/selenium,Ardesco/selenium,gregerrag/selenium,rplevka/selenium,telefonicaid/selenium,jabbrwcky/selenium,meksh/selenium,titusfortner/selenium,p0deje/selenium,carsonmcdonald/selenium,HtmlUnit/selenium,o-schneider/selenium,oddui/selenium,amikey/selenium,blackboarddd/selenium,Sravyaksr/selenium,joshbruning/selenium,gemini-testing/selenium,rrussell39/selenium,clavery/selenium,yukaReal/selenium,vveliev/selenium,bayandin/selenium,sebady/selenium,gregerrag/selenium,carlosroh/selenium,RamaraoDonta/ramarao-clone,dimacus/selenium,sag-enorman/selenium,xmhubj/selenium,skurochkin/selenium,stupidnetizen/selenium,lmtierney/selenium,tarlabs/selenium,thanhpete/selenium,manuelpirez/selenium,amar-sharma/selenium,bmannix/selenium,MCGallaspy/selenium,zenefits/selenium,wambat/selenium,mojwang/selenium,asashour/selenium,gorlemik/selenium,i17c/selenium,onedox/selenium,krmahadevan/selenium,gabrielsimas/selenium,pulkitsinghal/selenium,TikhomirovSergey/selenium,blueyed/selenium,joshmgrant/selenium,joshuaduffy/selenium,davehunt/selenium,livioc/selenium,twalpole/selenium,valfirst/selenium,dandv/selenium,compstak/selenium,dandv/s
elenium,markodolancic/selenium,knorrium/selenium,Sravyaksr/selenium,manuelpirez/selenium,chrsmithdemos/selenium,tbeadle/selenium,lilredindy/selenium,dcjohnson1989/selenium,sag-enorman/selenium,TheBlackTuxCorp/selenium,gorlemik/selenium,zenefits/selenium,denis-vilyuzhanin/selenium-fastview,doungni/selenium,sri85/selenium,Herst/selenium,misttechnologies/selenium,krosenvold/selenium,jsakamoto/selenium,kalyanjvn1/selenium,alexec/selenium,rplevka/selenium,lrowe/selenium,tarlabs/selenium,jknguyen/josephknguyen-selenium,TheBlackTuxCorp/selenium,isaksky/selenium,BlackSmith/selenium,MeetMe/selenium,jsakamoto/selenium,Dude-X/selenium,slongwang/selenium,jabbrwcky/selenium,SevInf/IEDriver,rplevka/selenium,jerome-jacob/selenium,denis-vilyuzhanin/selenium-fastview,GorK-ChO/selenium,GorK-ChO/selenium,misttechnologies/selenium,gurayinan/selenium,gabrielsimas/selenium,chrsmithdemos/selenium,petruc/selenium,HtmlUnit/selenium,lukeis/selenium,uchida/selenium,slongwang/selenium,gabrielsimas/selenium,rplevka/selenium,zenefits/selenium,sri85/selenium,dkentw/selenium,bmannix/selenium,Herst/selenium,amar-sharma/selenium,bartolkaruza/selenium,gregerrag/selenium,bayandin/selenium,bmannix/selenium,skurochkin/selenium,DrMarcII/selenium,rovner/selenium,SeleniumHQ/selenium,amar-sharma/selenium,gemini-testing/selenium,krosenvold/selenium,RamaraoDonta/ramarao-clone,JosephCastro/selenium,rplevka/selenium,HtmlUnit/selenium,carlosroh/selenium,pulkitsinghal/selenium,vveliev/selenium,alb-i986/selenium,telefonicaid/selenium,asashour/selenium,compstak/selenium,Tom-Trumper/selenium,Herst/selenium,freynaud/selenium,asolntsev/selenium,jerome-jacob/selenium,5hawnknight/selenium,lukeis/selenium,mojwang/selenium,joshuaduffy/selenium,temyers/selenium,sebady/selenium,dkentw/selenium,amikey/selenium,GorK-ChO/selenium,oddui/selenium,skurochkin/selenium,arunsingh/selenium,joshuaduffy/selenium,dcjohnson1989/selenium,compstak/selenium,jsakamoto/selenium,asashour/selenium,mach6/selenium,jknguyen/josephknguyen-selenium,
uchida/selenium,sri85/selenium,sevaseva/selenium,doungni/selenium,carsonmcdonald/selenium,amikey/selenium,SevInf/IEDriver,twalpole/selenium,s2oBCN/selenium,yukaReal/selenium,uchida/selenium,dkentw/selenium,carsonmcdonald/selenium,xsyntrex/selenium,eric-stanley/selenium,pulkitsinghal/selenium,carlosroh/selenium,carsonmcdonald/selenium,tbeadle/selenium,p0deje/selenium,TikhomirovSergey/selenium,joshbruning/selenium,alexec/selenium,houchj/selenium,Appdynamics/selenium,gotcha/selenium,HtmlUnit/selenium,joshbruning/selenium,doungni/selenium,Sravyaksr/selenium,blackboarddd/selenium,dibagga/selenium,tbeadle/selenium,xsyntrex/selenium,dbo/selenium,sri85/selenium,compstak/selenium,bayandin/selenium,bayandin/selenium,jsarenik/jajomojo-selenium,houchj/selenium,mach6/selenium,denis-vilyuzhanin/selenium-fastview,SeleniumHQ/selenium,mestihudson/selenium,anshumanchatterji/selenium,temyers/selenium,GorK-ChO/selenium,telefonicaid/selenium,twalpole/selenium,jsarenik/jajomojo-selenium,arunsingh/selenium,wambat/selenium,sebady/selenium,kalyanjvn1/selenium,dkentw/selenium,MCGallaspy/selenium,Herst/selenium,krmahadevan/selenium,yukaReal/selenium,jsarenik/jajomojo-selenium,knorrium/selenium,actmd/selenium,temyers/selenium,misttechnologies/selenium,titusfortner/selenium,onedox/selenium,HtmlUnit/selenium,xmhubj/selenium,Herst/selenium,sevaseva/selenium,sri85/selenium,xsyntrex/selenium,jerome-jacob/selenium,joshuaduffy/selenium,gemini-testing/selenium,SouWilliams/selenium,p0deje/selenium,5hawnknight/selenium,manuelpirez/selenium,o-schneider/selenium,JosephCastro/selenium,tbeadle/selenium,alb-i986/selenium,sankha93/selenium,dibagga/selenium,twalpole/selenium,twalpole/selenium,alexec/selenium,anshumanchatterji/selenium,lmtierney/selenium,RamaraoDonta/ramarao-clone,AutomatedTester/selenium,joshmgrant/selenium,amar-sharma/selenium,oddui/selenium,eric-stanley/selenium,dibagga/selenium,manuelpirez/selenium,rplevka/selenium,dimacus/selenium,markodolancic/selenium,rovner/selenium,titusfortner/seleniu
m,vinay-qa/vinayit-android-server-apk,orange-tv-blagnac/selenium,eric-stanley/selenium,denis-vilyuzhanin/selenium-fastview,jknguyen/josephknguyen-selenium,sankha93/selenium,dimacus/selenium,arunsingh/selenium,Jarob22/selenium,sebady/selenium,misttechnologies/selenium,meksh/selenium,dkentw/selenium,lrowe/selenium,Herst/selenium,dimacus/selenium,denis-vilyuzhanin/selenium-fastview,Tom-Trumper/selenium,temyers/selenium,BlackSmith/selenium,AutomatedTester/selenium,tkurnosova/selenium,Ardesco/selenium,Sravyaksr/selenium,isaksky/selenium,joshbruning/selenium,lukeis/selenium,telefonicaid/selenium,kalyanjvn1/selenium,jsarenik/jajomojo-selenium,misttechnologies/selenium,MCGallaspy/selenium,juangj/selenium,knorrium/selenium,uchida/selenium,zenefits/selenium,TheBlackTuxCorp/selenium,telefonicaid/selenium,dibagga/selenium,minhthuanit/selenium,tbeadle/selenium,denis-vilyuzhanin/selenium-fastview,tarlabs/selenium,s2oBCN/selenium,s2oBCN/selenium,aluedeke/chromedriver,dcjohnson1989/selenium,valfirst/selenium,xmhubj/selenium,sevaseva/selenium,alb-i986/selenium,SevInf/IEDriver,skurochkin/selenium,krosenvold/selenium,xmhubj/selenium,oddui/selenium,lmtierney/selenium,onedox/selenium,Jarob22/selenium,krmahadevan/selenium,clavery/selenium,krosenvold/selenium,Jarob22/selenium,vinay-qa/vinayit-android-server-apk,eric-stanley/selenium,dkentw/selenium,sag-enorman/selenium,davehunt/selenium,xmhubj/selenium,customcommander/selenium,titusfortner/selenium,sebady/selenium,Ardesco/selenium,joshmgrant/selenium,Appdynamics/selenium,gemini-testing/selenium,rovner/selenium,JosephCastro/selenium,BlackSmith/selenium,sri85/selenium,lukeis/selenium,asolntsev/selenium,SouWilliams/selenium,rovner/selenium,denis-vilyuzhanin/selenium-fastview,clavery/selenium,alb-i986/selenium,lilredindy/selenium,mojwang/selenium,bmannix/selenium,Ardesco/selenium,5hawnknight/selenium,joshmgrant/selenium,jsarenik/jajomojo-selenium,anshumanchatterji/selenium,oddui/selenium,sankha93/selenium,RamaraoDonta/ramarao-clone,titusfortn
er/selenium,lilredindy/selenium,valfirst/selenium,BlackSmith/selenium,tarlabs/selenium,petruc/selenium,carlosroh/selenium,xsyntrex/selenium,blueyed/selenium,lrowe/selenium,pulkitsinghal/selenium,GorK-ChO/selenium,GorK-ChO/selenium,amikey/selenium,alexec/selenium,asashour/selenium,xmhubj/selenium,jabbrwcky/selenium,arunsingh/selenium,thanhpete/selenium,DrMarcII/selenium,gurayinan/selenium,jknguyen/josephknguyen-selenium,quoideneuf/selenium,chrsmithdemos/selenium,Sravyaksr/selenium,knorrium/selenium,krmahadevan/selenium,dkentw/selenium,minhthuanit/selenium,bartolkaruza/selenium,thanhpete/selenium,DrMarcII/selenium,meksh/selenium,twalpole/selenium,TikhomirovSergey/selenium,gurayinan/selenium,arunsingh/selenium,Ardesco/selenium,kalyanjvn1/selenium,blueyed/selenium,tarlabs/selenium,tkurnosova/selenium,JosephCastro/selenium,orange-tv-blagnac/selenium,dandv/selenium,joshuaduffy/selenium,quoideneuf/selenium,gurayinan/selenium,gurayinan/selenium,juangj/selenium,dimacus/selenium,blueyed/selenium,houchj/selenium,amar-sharma/selenium,eric-stanley/selenium,rrussell39/selenium,p0deje/selenium,dkentw/selenium,dandv/selenium,uchida/selenium,juangj/selenium,gorlemik/selenium,blueyed/selenium,rrussell39/selenium,rrussell39/selenium,Jarob22/selenium,lukeis/selenium,dcjohnson1989/selenium,yukaReal/selenium,houchj/selenium,asolntsev/selenium,aluedeke/chromedriver,petruc/selenium,jknguyen/josephknguyen-selenium,rplevka/selenium,Appdynamics/selenium,meksh/selenium,jabbrwcky/selenium,tarlabs/selenium,soundcloud/selenium,bmannix/selenium,Jarob22/selenium,i17c/selenium,SevInf/IEDriver,HtmlUnit/selenium,carsonmcdonald/selenium,skurochkin/selenium,amikey/selenium,s2oBCN/selenium,Ardesco/selenium,alb-i986/selenium,MCGallaspy/selenium,gurayinan/selenium,davehunt/selenium,krmahadevan/selenium,denis-vilyuzhanin/selenium-fastview,markodolancic/selenium,oddui/selenium,dandv/selenium,manuelpirez/selenium,Ardesco/selenium,juangj/selenium,lrowe/selenium,chrsmithdemos/selenium,sebady/selenium,gemini-tes
ting/selenium,aluedeke/chromedriver,RamaraoDonta/ramarao-clone,doungni/selenium,denis-vilyuzhanin/selenium-fastview,TikhomirovSergey/selenium,p0deje/selenium,bmannix/selenium,misttechnologies/selenium,SeleniumHQ/selenium,zenefits/selenium,temyers/selenium,clavery/selenium,Dude-X/selenium,jsakamoto/selenium,Tom-Trumper/selenium,SeleniumHQ/selenium,jknguyen/josephknguyen-selenium,bayandin/selenium,SeleniumHQ/selenium,tkurnosova/selenium,onedox/selenium,oddui/selenium,vveliev/selenium,customcommander/selenium,joshmgrant/selenium,Appdynamics/selenium,dimacus/selenium
/* Copyright 2011 Selenium committers Copyright 2011 Software Freedom Conservancy Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.openqa.grid.web; import com.google.common.collect.Maps; import org.openqa.grid.internal.Registry; import org.openqa.grid.internal.utils.GridHubConfiguration; import org.openqa.grid.web.servlet.beta.ConsoleServlet; import org.openqa.grid.web.servlet.DisplayHelpServlet; import org.openqa.grid.web.servlet.DriverServlet; import org.openqa.grid.web.servlet.Grid1HeartbeatServlet; import org.openqa.grid.web.servlet.HubStatusServlet; import org.openqa.grid.web.servlet.LifecycleServlet; import org.openqa.grid.web.servlet.ProxyStatusServlet; import org.openqa.grid.web.servlet.RegistrationServlet; import org.openqa.grid.web.servlet.ResourceServlet; import org.openqa.grid.web.servlet.TestSessionStatusServlet; import org.openqa.grid.web.utils.ExtraServletUtil; import org.openqa.selenium.net.NetworkUtils; import org.seleniumhq.jetty7.server.Server; import org.seleniumhq.jetty7.server.bio.SocketConnector; import org.seleniumhq.jetty7.servlet.ServletContextHandler; import java.net.MalformedURLException; import java.net.URL; import java.util.Map; import java.util.logging.Logger; import javax.servlet.Servlet; /** * Jetty server. Main entry point for everything about the grid. <p/> Except for unit tests, this * should be a singleton. 
*/ public class Hub { private static final Logger log = Logger.getLogger(Hub.class.getName()); private final int port; private final String host; private final Registry registry; private final Map<String, Class<? extends Servlet>> extraServlet = Maps.newHashMap(); private Server server; private void addServlet(String key, Class<? extends Servlet> s) { extraServlet.put(key, s); } /** * get the registry backing up the hub state. * * @return The registry */ public Registry getRegistry() { return registry; } public Hub(GridHubConfiguration config) { registry = Registry.newInstance(this, config); if (config.getHost() != null) { host = config.getHost(); } else { NetworkUtils utils = new NetworkUtils(); host = utils.getIp4NonLoopbackAddressOfThisMachine().getHostAddress(); } this.port = config.getPort(); for (String s : config.getServlets()) { Class<? extends Servlet> servletClass = ExtraServletUtil.createServlet(s); if (servletClass != null) { String path = "/grid/admin/" + servletClass.getSimpleName() + "/*"; log.info("binding " + servletClass.getCanonicalName() + " to " + path); addServlet(path, servletClass); } } initServer(); } private void initServer() { try { server = new Server(); SocketConnector socketListener = new SocketConnector(); socketListener.setMaxIdleTime(60000); socketListener.setPort(port); server.addConnector(socketListener); ServletContextHandler root = new ServletContextHandler(ServletContextHandler.SESSIONS); root.setContextPath("/"); server.setHandler(root); root.setAttribute(Registry.KEY, registry); root.addServlet(DisplayHelpServlet.class.getName(), "/*"); root.addServlet(ConsoleServlet.class.getName(), "/grid/console/*"); root.addServlet(ConsoleServlet.class.getName(), "/grid/beta/console/*"); root.addServlet(org.openqa.grid.web.servlet.ConsoleServlet.class.getName(), "/grid/old/console/*"); root.addServlet(RegistrationServlet.class.getName(), "/grid/register/*"); // TODO remove at some point. Here for backward compatibility of // tests etc. 
root.addServlet(DriverServlet.class.getName(), "/grid/driver/*"); root.addServlet(DriverServlet.class.getName(), "/wd/hub/*"); root.addServlet(DriverServlet.class.getName(), "/selenium-server/driver/*"); root.addServlet(ResourceServlet.class.getName(), "/grid/resources/*"); root.addServlet(ProxyStatusServlet.class.getName(), "/grid/api/proxy/*"); root.addServlet(HubStatusServlet.class.getName(), "/grid/api/hub/*"); root.addServlet(TestSessionStatusServlet.class.getName(), "/grid/api/testsession/*"); root.addServlet(LifecycleServlet.class.getName(), "/lifecycle-manager/*"); // Selenium Grid 1.0 compatibility routes for older nodes trying to // work with the newer hub. root.addServlet(RegistrationServlet.class.getName(), "/registration-manager/register/*"); root.addServlet(Grid1HeartbeatServlet.class.getName(), "/heartbeat"); // Load any additional servlets provided by the user. for (Map.Entry<String, Class<? extends Servlet>> entry : extraServlet.entrySet()) { root.addServlet(entry.getValue().getName(), entry.getKey()); } } catch (Throwable e) { throw new RuntimeException("Error initializing the hub" + e.getMessage(), e); } } public int getPort() { return port; } public String getHost() { return host; } public void start() throws Exception { initServer(); server.start(); } public void stop() throws Exception { server.stop(); } public URL getUrl() { try { return new URL("http://" + getHost() + ":" + getPort()); } catch (MalformedURLException e) { throw new RuntimeException(e.getMessage()); } } public URL getRegistrationURL() { String uri = "http://" + getHost() + ":" + getPort() + "/grid/register/"; try { return new URL(uri); } catch (MalformedURLException e) { throw new RuntimeException(e); } } }
java/server/src/org/openqa/grid/web/Hub.java
/* Copyright 2011 Selenium committers Copyright 2011 Software Freedom Conservancy Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.openqa.grid.web; import com.google.common.collect.Maps; import org.openqa.grid.internal.Registry; import org.openqa.grid.internal.utils.GridHubConfiguration; import org.openqa.grid.web.servlet.beta.ConsoleServlet; import org.openqa.grid.web.servlet.DisplayHelpServlet; import org.openqa.grid.web.servlet.DriverServlet; import org.openqa.grid.web.servlet.Grid1HeartbeatServlet; import org.openqa.grid.web.servlet.HubStatusServlet; import org.openqa.grid.web.servlet.LifecycleServlet; import org.openqa.grid.web.servlet.ProxyStatusServlet; import org.openqa.grid.web.servlet.RegistrationServlet; import org.openqa.grid.web.servlet.ResourceServlet; import org.openqa.grid.web.servlet.TestSessionStatusServlet; import org.openqa.grid.web.utils.ExtraServletUtil; import org.openqa.selenium.net.NetworkUtils; import org.seleniumhq.jetty7.server.Server; import org.seleniumhq.jetty7.server.bio.SocketConnector; import org.seleniumhq.jetty7.servlet.ServletContextHandler; import java.net.MalformedURLException; import java.net.URL; import java.util.Map; import java.util.logging.Logger; import javax.servlet.Servlet; /** * Jetty server. Main entry point for everything about the grid. <p/> Except for unit tests, this * should be a singleton. 
*/ public class Hub { private static final Logger log = Logger.getLogger(Hub.class.getName()); private final int port; private final String host; private final Registry registry; private final Map<String, Class<? extends Servlet>> extraServlet = Maps.newHashMap(); private Server server; private void addServlet(String key, Class<? extends Servlet> s) { extraServlet.put(key, s); } /** * get the registry backing up the hub state. * * @return The registry */ public Registry getRegistry() { return registry; } public Hub(GridHubConfiguration config) { registry = Registry.newInstance(this, config); if (config.getHost() != null) { host = config.getHost(); } else { NetworkUtils utils = new NetworkUtils(); host = utils.getIp4NonLoopbackAddressOfThisMachine().getHostAddress(); } this.port = config.getPort(); for (String s : config.getServlets()) { Class<? extends Servlet> servletClass = ExtraServletUtil.createServlet(s); if (servletClass != null) { String path = "/grid/admin/" + servletClass.getSimpleName() + "/*"; log.info("binding " + servletClass.getCanonicalName() + " to " + path); addServlet(path, servletClass); } } initServer(); } private void initServer() { try { server = new Server(); SocketConnector socketListener = new SocketConnector(); socketListener.setMaxIdleTime(60000); socketListener.setPort(port); server.addConnector(socketListener); ServletContextHandler root = new ServletContextHandler(ServletContextHandler.SESSIONS); root.setContextPath("/"); server.setHandler(root); root.setAttribute(Registry.KEY, registry); root.addServlet(DisplayHelpServlet.class.getName(), "/*"); root.addServlet(ConsoleServlet.class.getName(), "/grid/console/*"); root.addServlet(ConsoleServlet.class.getName(), "/grid/beta/console/*"); root.addServlet(RegistrationServlet.class.getName(), "/grid/register/*"); // TODO remove at some point. Here for backward compatibility of // tests etc. 
root.addServlet(DriverServlet.class.getName(), "/grid/driver/*"); root.addServlet(DriverServlet.class.getName(), "/wd/hub/*"); root.addServlet(DriverServlet.class.getName(), "/selenium-server/driver/*"); root.addServlet(ResourceServlet.class.getName(), "/grid/resources/*"); root.addServlet(ProxyStatusServlet.class.getName(), "/grid/api/proxy/*"); root.addServlet(HubStatusServlet.class.getName(), "/grid/api/hub/*"); root.addServlet(TestSessionStatusServlet.class.getName(), "/grid/api/testsession/*"); root.addServlet(LifecycleServlet.class.getName(), "/lifecycle-manager/*"); // Selenium Grid 1.0 compatibility routes for older nodes trying to // work with the newer hub. root.addServlet(RegistrationServlet.class.getName(), "/registration-manager/register/*"); root.addServlet(Grid1HeartbeatServlet.class.getName(), "/heartbeat"); // Load any additional servlets provided by the user. for (Map.Entry<String, Class<? extends Servlet>> entry : extraServlet.entrySet()) { root.addServlet(entry.getValue().getName(), entry.getKey()); } } catch (Throwable e) { throw new RuntimeException("Error initializing the hub" + e.getMessage(), e); } } public int getPort() { return port; } public String getHost() { return host; } public void start() throws Exception { initServer(); server.start(); } public void stop() throws Exception { server.stop(); } public URL getUrl() { try { return new URL("http://" + getHost() + ":" + getPort()); } catch (MalformedURLException e) { throw new RuntimeException(e.getMessage()); } } public URL getRegistrationURL() { String uri = "http://" + getHost() + ":" + getPort() + "/grid/register/"; try { return new URL(uri); } catch (MalformedURLException e) { throw new RuntimeException(e); } } }
Old console is now available at /grid/old/console/
java/server/src/org/openqa/grid/web/Hub.java
Old console is now available at /grid/old/console/
Java
apache-2.0
82a4550f7d552f8cd0909d6a90cce42b9486a1cd
0
akosyakov/intellij-community,supersven/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,petteyg/intellij-community,slisson/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,asedunov/intellij-community,Lekanich/intellij-community,samthor/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,FHannes/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,wreckJ/intellij-community,caot/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,retomerz/intellij-community,Lekanich/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,holmes/intellij-community,blademainer/intellij-community,suncycheng/intellij-community,samthor/intellij-community,youdonghai/intellij-community,amith01994/intellij-community,petteyg/intellij-community,apixandru/intellij-community,ryano144/intellij-community,hurricup/intellij-community,allotria/intellij-community,hurricup/intellij-community,allotria/intellij-community,signed/intellij-community,pwoodworth/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,MER-GROUP/intellij-community,ThiagoGarciaAlves/intellij-community,robovm/robovm-studio,izonder/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,da1z/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,ol-loginov/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,Micha
elNedzelsky/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,xfournet/intellij-community,clumsy/intellij-community,suncycheng/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,retomerz/intellij-community,samthor/intellij-community,ryano144/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,blademainer/intellij-community,robovm/robovm-studio,fitermay/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,caot/intellij-community,robovm/robovm-studio,MER-GROUP/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,semonte/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,clumsy/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,samthor/intellij-community,fitermay/intellij-community,xfournet/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,ryano144/intellij-community,xfournet/intellij-community,diorcety/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,michaelgallacher/intellij-community,samthor/intellij-community,gnuhub/intellij-community
,idea4bsd/idea4bsd,TangHao1987/intellij-community,robovm/robovm-studio,fnouama/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,gnuhub/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,caot/intellij-community,amith01994/intellij-community,vladmm/intellij-community,xfournet/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,ryano144/intellij-community,pwoodworth/intellij-community,kool79/intellij-community,da1z/intellij-community,slisson/intellij-community,clumsy/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,ol-loginov/intellij-community,allotria/intellij-community,suncycheng/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,michaelgallacher/intellij-community,fnouama/intellij-community,Distrotech/intellij-community,clumsy/intellij-community,petteyg/intellij-community,fengbaicanhe/intellij-community,signed/intellij-community,robovm/robovm-studio,signed/intellij-community,retomerz/intellij-community,slisson/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,holmes/intellij-community,semonte/intellij-community,diorcety/intellij-community,semonte/intellij-community,jagguli/intellij-community,fitermay/intellij-community,akosyakov/intellij-community,semonte/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,holmes/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,kdwink/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,Distrotech/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,FHannes/intellij-community,da1z/intellij-community
,akosyakov/intellij-community,samthor/intellij-community,ibinti/intellij-community,slisson/intellij-community,supersven/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,caot/intellij-community,jagguli/intellij-community,izonder/intellij-community,clumsy/intellij-community,signed/intellij-community,apixandru/intellij-community,FHannes/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,gnuhub/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,akosyakov/intellij-community,adedayo/intellij-community,petteyg/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,holmes/intellij-community,holmes/intellij-community,semonte/intellij-community,dslomov/intellij-community,mglukhikh/intellij-community,wreckJ/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,caot/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,caot/intellij-community,tmpgit/intellij-community,retomerz/intellij-community,ibinti/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,dslomov/intellij-community,dslomov/intellij-community,idea4bsd/idea4bsd,ftomassetti/intellij-community,lucafavatella/intellij-community,kool79/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,semonte/intellij-community,signed/intellij-community,salguarnieri/int
ellij-community,salguarnieri/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,Lekanich/intellij-community,da1z/intellij-community,slisson/intellij-community,ol-loginov/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,signed/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,holmes/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,xfournet/intellij-community,robovm/robovm-studio,signed/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,diorcety/intellij-community,dslomov/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,petteyg/intellij-community,xfournet/intellij-community,clumsy/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,jagguli/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,izonder/intellij-community,vladmm/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,izonder/intellij-community,petteyg/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,caot/intellij-community,jagguli/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,amith01994/intellij-community,slisson/intellij-community,vladmm/intellij-community,dslomov/intellij-community,asedunov/intellij-community,supersven/intellij-community,Lekanich/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,kdwink/intellij-commun
ity,kdwink/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,supersven/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,ryano144/intellij-community,muntasirsyed/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,holmes/intellij-community,FHannes/intellij-community,xfournet/intellij-community,xfournet/intellij-community,slisson/intellij-community,akosyakov/intellij-community,caot/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,ftomassetti/intellij-community,kdwink/intellij-community,xfournet/intellij-community,amith01994/intellij-community,ryano144/intellij-community,slisson/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,signed/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,TangHao1987/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,jagguli/intellij-community,adedayo/intellij-community,signed/intellij-community,vladmm/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,samthor/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,petteyg/intellij-community,jagguli/intellij-community,asedunov/intellij-community,dslomov/intellij-community,dslomov/intellij-community,tmpgit/intellij
-community,muntasirsyed/intellij-community,fengbaicanhe/intellij-community,adedayo/intellij-community,kdwink/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,semonte/intellij-community,signed/intellij-community,youdonghai/intellij-community,allotria/intellij-community,clumsy/intellij-community,retomerz/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,kool79/intellij-community,michaelgallacher/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,supersven/intellij-community,salguarnieri/intellij-community,ahb0327/intellij-community,diorcety/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,supersven/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,vladmm/intellij-community,fnouama/intellij-community,fnouama/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,diorcety/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,dslomov/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,asedunov/intellij-community,supersven/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,pet
teyg/intellij-community,ThiagoGarciaAlves/intellij-community,nicolargo/intellij-community,Distrotech/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,caot/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,ahb0327/intellij-community,ibinti/intellij-community,samthor/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,adedayo/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,amith01994/intellij-community,asedunov/intellij-community,Lekanich/intellij-community,clumsy/intellij-community,caot/intellij-community,asedunov/intellij-community,da1z/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,orekyuu/intellij-community,da1z/intellij-community,izonder/intellij-community,fitermay/intellij-community,fitermay/intellij-community,ibinti/intellij-community,ibinti/intellij-community,adedayo/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,adedayo/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,hurricup/intellij-community,Lekanich/intellij-community,allotria/intellij-community,slisson/intellij-community,ibinti/intellij-community,suncycheng/intellij-co
mmunity,nicolargo/intellij-community,pwoodworth/intellij-community,apixandru/intellij-community,fitermay/intellij-community,amith01994/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,diorcety/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,allotria/intellij-community,izonder/intellij-community,FHannes/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,diorcety/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,da1z/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,amith01994/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,ivan-fedorov/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,petteyg/intellij-community,slisson/intellij-community,signed/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,diorcety/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,youdonghai/intellij-community,lucafavatella/intellij-community,kool79/intellij-community,asedunov/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,tmpgit/intellij-community,izonder/intellij-community,retomerz/intellij-community,slisson/intellij-community,fitermay/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,ftomassetti/intellij-community,kdwink/intellij-community,orekyuu/intellij
-community,robovm/robovm-studio,fitermay/intellij-community,ryano144/intellij-community,kool79/intellij-community,robovm/robovm-studio,robovm/robovm-studio,orekyuu/intellij-community,ftomassetti/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,kool79/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,diorcety/intellij-community,vladmm/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,amith01994/intellij-community,allotria/intellij-community,fitermay/intellij-community,holmes/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,vladmm/intellij-community,nicolargo/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,blademainer/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,ahb0327/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,izonder/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,holmes/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,muntasirsyed/intellij-community,semonte/intellij-community,fnouama/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,ryano144/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,caot/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,supersven/intellij-com
munity,amith01994/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,ryano144/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,slisson/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,fnouama/intellij-community,semonte/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,holmes/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,vvv1559/intellij-community,samthor/intellij-community,ibinti/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,fitermay/intellij-community,ahb0327/intellij-community
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.profile.codeInspection.ui; import com.intellij.CommonBundle; import com.intellij.codeHighlighting.HighlightDisplayLevel; import com.intellij.codeInsight.daemon.HighlightDisplayKey; import com.intellij.codeInsight.daemon.impl.HighlightInfoType; import com.intellij.codeInsight.daemon.impl.SeverityRegistrar; import com.intellij.codeInsight.daemon.impl.SeverityUtil; import com.intellij.codeInsight.hint.HintUtil; import com.intellij.codeInspection.InspectionProfile; import com.intellij.codeInspection.InspectionsBundle; import com.intellij.codeInspection.ModifiableModel; import com.intellij.codeInspection.ex.*; import com.intellij.icons.AllIcons; import com.intellij.ide.CommonActionsManager; import com.intellij.ide.DefaultTreeExpander; import com.intellij.ide.TreeExpander; import com.intellij.ide.ui.search.SearchUtil; import com.intellij.ide.ui.search.SearchableOptionsRegistrar; import com.intellij.lang.annotation.HighlightSeverity; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.colors.TextAttributesKey; import com.intellij.openapi.editor.markup.TextAttributes; import com.intellij.openapi.options.ConfigurationException; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.ui.Splitter; import com.intellij.openapi.util.Comparing; import 
com.intellij.openapi.util.JDOMUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.profile.ApplicationProfileManager; import com.intellij.profile.DefaultProjectProfileManager; import com.intellij.profile.ProfileManager; import com.intellij.profile.codeInspection.InspectionProfileManager; import com.intellij.profile.codeInspection.InspectionProfileManagerImpl; import com.intellij.profile.codeInspection.InspectionProjectProfileManager; import com.intellij.profile.codeInspection.SeverityProvider; import com.intellij.profile.codeInspection.ui.actions.AddScopeAction; import com.intellij.profile.codeInspection.ui.actions.DeleteScopeAction; import com.intellij.profile.codeInspection.ui.actions.MoveScopeAction; import com.intellij.psi.search.scope.packageSet.NamedScope; import com.intellij.ui.*; import com.intellij.ui.treeStructure.Tree; import com.intellij.util.Alarm; import com.intellij.util.IconUtil; import com.intellij.util.config.StorageAccessors; import com.intellij.util.containers.Convertor; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.tree.TreeUtil; import org.jdom.Element; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.event.TreeExpansionEvent; import javax.swing.event.TreeExpansionListener; import javax.swing.event.TreeSelectionEvent; import javax.swing.event.TreeSelectionListener; import javax.swing.tree.DefaultTreeModel; import javax.swing.tree.DefaultTreeSelectionModel; import javax.swing.tree.TreeNode; import javax.swing.tree.TreePath; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.io.IOException; import java.io.StringReader; import java.util.*; import java.util.List; /** * User: anna * Date: 31-May-2006 */ public class SingleInspectionProfilePanel extends JPanel { private static 
final Logger LOG = Logger.getInstance("#com.intellij.codeInspection.ex.InspectionToolsPanel"); @NonNls private static final String INSPECTION_FILTER_HISTORY = "INSPECTION_FILTER_HISTORY"; private static final String UNDER_CONSTRUCTION = InspectionsBundle.message("inspection.tool.description.under.construction.text"); private final Map<Descriptor, List<Descriptor>> myDescriptors = new HashMap<Descriptor, List<Descriptor>>(); private InspectionProfileImpl mySelectedProfile; private JEditorPane myBrowser; private JPanel myOptionsPanel; private JPanel myInspectionProfilePanel = null; private FilterComponent myProfileFilter; private final InspectionConfigTreeNode myRoot = new InspectionConfigTreeNode(InspectionsBundle.message("inspection.root.node.title"), null, false, false); private final Alarm myAlarm = new Alarm(); private boolean myModified = false; private Tree myTree; private TreeExpander myTreeExpander; @NotNull private String myInitialProfile; @NonNls private static final String EMPTY_HTML = "<html><body></body></html>"; private boolean myIsInRestore = false; @NonNls private static final String VERTICAL_DIVIDER_PROPORTION = "VERTICAL_DIVIDER_PROPORTION"; @NonNls private static final String HORIZONTAL_DIVIDER_PROPORTION = "HORIZONTAL_DIVIDER_PROPORTION"; private final StorageAccessors myProperties = StorageAccessors.createGlobal("SingleInspectionProfilePanel"); private boolean myShareProfile; private final InspectionProjectProfileManager myProjectProfileManager; private Splitter myRightSplitter; private Splitter myMainSplitter; public SingleInspectionProfilePanel(@NotNull InspectionProjectProfileManager projectProfileManager, @NotNull String inspectionProfileName, @NotNull ModifiableModel profile) { super(new BorderLayout()); myProjectProfileManager = projectProfileManager; mySelectedProfile = (InspectionProfileImpl)profile; myInitialProfile = inspectionProfileName; myShareProfile = profile.getProfileManager() == projectProfileManager; } private static 
// Returns the persisted expand/collapse state of the settings tree for the given profile.
// Application-level profiles keep this state application-wide; project-level profiles
// keep it per project (see the two VisibleTreeState stores below).
VisibleTreeState getExpandedNodes(InspectionProfileImpl profile) {
  if (profile.getProfileManager() instanceof ApplicationProfileManager) {
    return AppInspectionProfilesVisibleTreeState.getInstance().getVisibleTreeState(profile);
  }
  else {
    DefaultProjectProfileManager projectProfileManager = (DefaultProjectProfileManager)profile.getProfileManager();
    return ProjectInspectionProfilesVisibleTreeState.getInstance(projectProfileManager.getProject()).getVisibleTreeState(profile);
  }
}

// Lazily builds the whole settings panel (invoked from setVisible) and installs a
// UserActivityWatcher over the options panel so that any user interaction re-checks
// whether tool settings were modified.
private void initUI() {
  myInspectionProfilePanel = createInspectionProfileSettingsPanel();
  add(myInspectionProfilePanel, BorderLayout.CENTER);
  UserActivityWatcher userActivityWatcher = new UserActivityWatcher();
  userActivityWatcher.addUserActivityListener(new UserActivityListener() {
    @Override
    public void stateChanged() {
      //invoke after all other listeners
      SwingUtilities.invokeLater(new Runnable() {
        @Override
        public void run() {
          if (mySelectedProfile == null) return; //panel was disposed
          updateProperSettingsForSelection();
          wereToolSettingsModified();
        }
      });
    }
  });
  userActivityWatcher.register(myOptionsPanel);
  updateSelectedProfileState();
  reset();
}

// Restores persisted tree expansion state and re-applies the current selection.
private void updateSelectedProfileState() {
  if (mySelectedProfile == null) return;
  restoreTreeState();
  repaintTableData();
  updateSelection();
}

// Re-selects the current tree path and scrolls it into the centered, visible area.
public void updateSelection() {
  if (myTree != null) {
    final TreePath selectionPath = myTree.getSelectionPath();
    if (selectionPath != null) {
      TreeUtil.selectNode(myTree, (TreeNode)selectionPath.getLastPathComponent());
      TreeUtil.showRowCentered(myTree, myTree.getRowForPath(selectionPath), false);
    }
  }
}

// Recomputes the myModified flag by scanning every descriptor (default state first,
// then the per-scope descriptors). Returns early on the first modified tool — the
// boolean overload sets myModified = true itself — and clears the flag only when
// no tool reported changes.
private void wereToolSettingsModified() {
  for (Map.Entry<Descriptor, List<Descriptor>> entry : myDescriptors.entrySet()) {
    Descriptor desc = entry.getKey();
    if (wereToolSettingsModified(desc)) return;
    List<Descriptor> descriptors = entry.getValue();
    for (Descriptor descriptor : descriptors) {
      if (wereToolSettingsModified(descriptor)) return;
    }
  }
  myModified = false;
}

private boolean
wereToolSettingsModified(Descriptor descriptor) {
  // Compares the descriptor's stored XML config snapshot against a freshly
  // serialized one; a difference means the user changed the tool's options.
  InspectionToolWrapper toolWrapper = descriptor.getToolWrapper();
  if (toolWrapper == null || !mySelectedProfile.isToolEnabled(descriptor.getKey())) {
    return false; // disabled tools never count as modified
  }
  Element oldConfig = descriptor.getConfig();
  if (oldConfig == null) return false;
  Element newConfig = Descriptor.createConfigElement(toolWrapper);
  if (!JDOMUtil.areElementsEqual(oldConfig, newConfig)) {
    // Coalesce rapid changes into a single delayed repaint (300 ms).
    myAlarm.cancelAllRequests();
    myAlarm.addRequest(new Runnable() {
      @Override
      public void run() {
        myTree.repaint();
      }
    }, 300);
    myModified = true;
    return true;
  }
  return false;
}

// Keeps the selected node's "proper setting" marker in sync with the profile;
// drops the node's cached presentation and refreshes ancestors when it flips.
private void updateProperSettingsForSelection() {
  final TreePath selectionPath = myTree.getSelectionPath();
  if (selectionPath != null) {
    InspectionConfigTreeNode node = (InspectionConfigTreeNode)selectionPath.getLastPathComponent();
    final Descriptor descriptor = node.getDescriptor();
    if (descriptor != null) {
      final boolean properSetting = mySelectedProfile.isProperSetting(descriptor.getKey().toString());
      if (node.isProperSetting() != properSetting) {
        // Delayed repaint, same 300 ms coalescing as in wereToolSettingsModified.
        myAlarm.cancelAllRequests();
        myAlarm.addRequest(new Runnable() {
          @Override
          public void run() {
            myTree.repaint();
          }
        }, 300);
        node.dropCache();
        updateUpHierarchy(node, (InspectionConfigTreeNode)node.getParent());
      }
    }
  }
}

// Rebuilds myDescriptors from the selected profile: one Descriptor per default
// tool state (keys of the map), mapped to the Descriptors of its non-default
// (scope-specific) states. Tools at NON_SWITCHABLE_ERROR level are excluded.
private void initDescriptors() {
  final InspectionProfileImpl profile = mySelectedProfile;
  if (profile == null) return;
  myDescriptors.clear();
  List<ScopeToolState> tools = profile.getDefaultStates(myProjectProfileManager.getProject());
  for (ScopeToolState state : tools) {
    final ArrayList<Descriptor> descriptors = new ArrayList<Descriptor>();
    if (state.getLevel() == HighlightDisplayLevel.NON_SWITCHABLE_ERROR) {
      continue;
    }
    Project project = myProjectProfileManager.getProject();
    myDescriptors.put(new Descriptor(state, profile, project), descriptors);
    InspectionToolWrapper toolWrapper = state.getTool();
    final List<ScopeToolState> nonDefaultTools = profile.getNonDefaultTools(toolWrapper.getShortName(), project);
    for
(ScopeToolState nonDefaultToolState : nonDefaultTools) { descriptors.add(new Descriptor(nonDefaultToolState, profile, project)); } } } private void postProcessModification() { wereToolSettingsModified(); //resetup configs for (ScopeToolState state : mySelectedProfile.getAllTools(myProjectProfileManager.getProject())) { state.resetConfigPanel(); } fillTreeData(myProfileFilter.getFilter(), true); repaintTableData(); updateOptionsAndDescriptionPanel(myTree.getSelectionPath()); } @Nullable public static ModifiableModel createNewProfile(final int initValue, ModifiableModel selectedProfile, JPanel parent, String profileName, Set<String> existingProfileNames, @NotNull Project project) { profileName = Messages.showInputDialog(parent, profileName, "Create New Inspection Profile", Messages.getQuestionIcon()); if (profileName == null) return null; final ProfileManager profileManager = selectedProfile.getProfileManager(); if (existingProfileNames.contains(profileName)) { Messages.showErrorDialog(InspectionsBundle.message("inspection.unable.to.create.profile.message", profileName), InspectionsBundle.message("inspection.unable.to.create.profile.dialog.title")); return null; } InspectionProfileImpl inspectionProfile = new InspectionProfileImpl(profileName, InspectionToolRegistrar.getInstance(), profileManager); if (initValue == -1) { inspectionProfile.initInspectionTools(project); ModifiableModel profileModifiableModel = inspectionProfile.getModifiableModel(); final InspectionToolWrapper[] profileEntries = profileModifiableModel.getInspectionTools(null); for (InspectionToolWrapper toolWrapper : profileEntries) { profileModifiableModel.disableTool(toolWrapper.getShortName(), (NamedScope)null, project); } profileModifiableModel.setLocal(true); profileModifiableModel.setModified(true); return profileModifiableModel; } else if (initValue == 0) { inspectionProfile.copyFrom(selectedProfile); inspectionProfile.setName(profileName); inspectionProfile.initInspectionTools(project); 
inspectionProfile.setModified(true); return inspectionProfile; } return null; } public void setFilter(String filter) { myProfileFilter.setFilter(filter); } public void filterTree(String filter) { if (myTree != null) { getExpandedNodes(mySelectedProfile).saveVisibleState(myTree); fillTreeData(filter, true); reloadModel(); restoreTreeState(); if (myTree.getSelectionPath() == null) { TreeUtil.selectFirstNode(myTree); } } } private void reloadModel() { try { myIsInRestore = true; ((DefaultTreeModel)myTree.getModel()).reload(); } finally { myIsInRestore = false; } } private void restoreTreeState() { try { myIsInRestore = true; getExpandedNodes(mySelectedProfile).restoreVisibleState(myTree); } finally { myIsInRestore = false; } } private ActionToolbar createTreeToolbarPanel() { final CommonActionsManager actionManager = CommonActionsManager.getInstance(); DefaultActionGroup actions = new DefaultActionGroup(); actions.add(actionManager.createExpandAllAction(myTreeExpander, myTree)); actions.add(actionManager.createCollapseAllAction(myTreeExpander, myTree)); actions.add(new AnAction(CommonBundle.message("button.reset.to.default"), CommonBundle.message("button.reset.to.default"), AllIcons.General.Reset) { { registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_R, InputEvent.CTRL_MASK)), myTree); } @Override public void update(AnActionEvent e) { e.getPresentation().setEnabled(myRoot.isProperSetting()); } @Override public void actionPerformed(AnActionEvent e) { mySelectedProfile.resetToBase(myProjectProfileManager.getProject()); postProcessModification(); } }); actions.add(new AnAction("Reset to Empty", "Reset to empty", AllIcons.Actions.Reset_to_empty){ @Override public void update(AnActionEvent e) { e.getPresentation().setEnabled(mySelectedProfile != null && mySelectedProfile.isExecutable(myProjectProfileManager.getProject())); } @Override public void actionPerformed(AnActionEvent e) { mySelectedProfile.resetToEmpty(e.getProject()); 
postProcessModification(); } }); actions.add(new ToggleAction("Lock Profile", "Lock profile", AllIcons.Nodes.Padlock) { @Override public boolean isSelected(AnActionEvent e) { return mySelectedProfile != null && mySelectedProfile.isProfileLocked(); } @Override public void setSelected(AnActionEvent e, boolean state) { mySelectedProfile.lockProfile(state); } }); actions.addSeparator(); actions.add(new MyAddScopeAction()); actions.add(new MyDeleteScopeAction()); actions.add(new MoveScopeAction(myTree, "Move Scope Up", IconUtil.getMoveUpIcon(), -1) { @Override protected boolean isEnabledFor(int idx, InspectionConfigTreeNode parent) { return idx > 0; } @Override protected InspectionProfileImpl getSelectedProfile() { return mySelectedProfile; } }); actions.add(new MoveScopeAction(myTree, "Move Scope Down", IconUtil.getMoveDownIcon(), 1) { @Override protected boolean isEnabledFor(int idx, InspectionConfigTreeNode parent) { return idx < parent.getChildCount() - 2; } @Override protected InspectionProfileImpl getSelectedProfile() { return mySelectedProfile; } }); actions.addSeparator(); final ActionToolbar actionToolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, actions, true); actionToolbar.setTargetComponent(this); return actionToolbar; } private void repaintTableData() { if (myTree != null) { getExpandedNodes(mySelectedProfile).saveVisibleState(myTree); reloadModel(); restoreTreeState(); } } public void selectInspectionTool(String name) { final InspectionConfigTreeNode node = findNodeByKey(name, myRoot); if (node != null) { TreeUtil.showRowCentered(myTree, myTree.getRowForPath(new TreePath(node.getPath())) - 1, true);//myTree.isRootVisible ? 
0 : 1; TreeUtil.selectNode(myTree, node); } } @Nullable private static InspectionConfigTreeNode findNodeByKey(String name, InspectionConfigTreeNode root) { for (int i = 0; i < root.getChildCount(); i++) { final InspectionConfigTreeNode child = (InspectionConfigTreeNode)root.getChildAt(i); final Descriptor descriptor = child.getDescriptor(); if (descriptor != null) { if (descriptor.getKey().toString().equals(name)) { return child; } } else { final InspectionConfigTreeNode node = findNodeByKey(name, child); if (node != null) return node; } } return null; } private JScrollPane initTreeScrollPane() { fillTreeData(null, true); final InspectionsConfigTreeRenderer renderer = new InspectionsConfigTreeRenderer(myProjectProfileManager.getProject()){ @Override protected String getFilter() { return myProfileFilter != null ? myProfileFilter.getFilter() : null; } }; myTree = new CheckboxTree(renderer, myRoot) { @Override public Dimension getPreferredScrollableViewportSize() { Dimension size = super.getPreferredScrollableViewportSize(); size = new Dimension(size.width + 10, size.height); return size; } @Override protected void onNodeStateChanged(final CheckedTreeNode node) { toggleToolNode((InspectionConfigTreeNode)node); } }; myTree.setCellRenderer(renderer); myTree.setRootVisible(false); myTree.setShowsRootHandles(true); UIUtil.setLineStyleAngled(myTree); TreeUtil.installActions(myTree); myTree.addTreeSelectionListener(new TreeSelectionListener() { @Override public void valueChanged(TreeSelectionEvent e) { if (myTree.getSelectionPaths() != null && myTree.getSelectionPaths().length == 1) { updateOptionsAndDescriptionPanel(myTree.getSelectionPaths()[0]); } else { initOptionsAndDescriptionPanel(); } if (!myIsInRestore) { InspectionProfileImpl selected = mySelectedProfile; if (selected != null) { InspectionProfileImpl baseProfile = (InspectionProfileImpl)selected.getParentProfile(); if (baseProfile != null) { 
getExpandedNodes(baseProfile).setSelectionPaths(myTree.getSelectionPaths()); } getExpandedNodes(selected).setSelectionPaths(myTree.getSelectionPaths()); } } } }); myTree.addMouseListener(new PopupHandler() { @Override public void invokePopup(Component comp, int x, int y) { final int[] selectionRows = myTree.getSelectionRows(); if (selectionRows != null && myTree.getPathForLocation(x, y) != null && Arrays.binarySearch(selectionRows, myTree.getRowForLocation(x, y)) > -1) { compoundPopup().show(comp, x, y); } } }); new TreeSpeedSearch(myTree, new Convertor<TreePath, String>() { @Override public String convert(TreePath o) { final InspectionConfigTreeNode node = (InspectionConfigTreeNode)o.getLastPathComponent(); final Descriptor descriptor = node.getDescriptor(); return descriptor != null ? InspectionsConfigTreeComparator.getDisplayTextToSort(descriptor.getText()) : InspectionsConfigTreeComparator .getDisplayTextToSort(node.getGroupName()); } }); myTree.setSelectionModel(new DefaultTreeSelectionModel()); final JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(myTree); scrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED); TreeUtil.collapseAll(myTree, 1); myTree.addTreeExpansionListener(new TreeExpansionListener() { @Override public void treeCollapsed(TreeExpansionEvent event) { InspectionProfileImpl selected = mySelectedProfile; final InspectionConfigTreeNode node = (InspectionConfigTreeNode)event.getPath().getLastPathComponent(); final InspectionProfileImpl parentProfile = (InspectionProfileImpl)selected.getParentProfile(); if (parentProfile != null) { getExpandedNodes(parentProfile).saveVisibleState(myTree); } getExpandedNodes(selected).saveVisibleState(myTree); } @Override public void treeExpanded(TreeExpansionEvent event) { InspectionProfileImpl selected = mySelectedProfile; if (selected != null) { final InspectionConfigTreeNode node = (InspectionConfigTreeNode)event.getPath().getLastPathComponent(); final 
InspectionProfileImpl parentProfile = (InspectionProfileImpl)selected.getParentProfile(); if (parentProfile != null) { getExpandedNodes(parentProfile).expandNode(node); } getExpandedNodes(selected).expandNode(node); } } }); myTreeExpander = new DefaultTreeExpander(myTree); myProfileFilter = new MyFilterComponent(); return scrollPane; } private JPopupMenu compoundPopup() { final DefaultActionGroup group = new DefaultActionGroup(); final SeverityRegistrar severityRegistrar = ((SeverityProvider)mySelectedProfile.getProfileManager()).getOwnSeverityRegistrar(); TreeSet<HighlightSeverity> severities = new TreeSet<HighlightSeverity>(severityRegistrar); severities.add(HighlightSeverity.ERROR); severities.add(HighlightSeverity.WARNING); severities.add(HighlightSeverity.WEAK_WARNING); final Collection<SeverityRegistrar.SeverityBasedTextAttributes> infoTypes = SeverityUtil.getRegisteredHighlightingInfoTypes(severityRegistrar); for (SeverityRegistrar.SeverityBasedTextAttributes info : infoTypes) { severities.add(info.getSeverity()); } for (HighlightSeverity severity : severities) { final HighlightDisplayLevel level = HighlightDisplayLevel.find(severity); group.add(new AnAction(renderSeverity(severity), renderSeverity(severity), level.getIcon()) { @Override public void actionPerformed(AnActionEvent e) { setNewHighlightingLevel(level); } }); } group.add(Separator.getInstance()); group.add(new MyAddScopeAction()); group.add(new MyDeleteScopeAction()); ActionPopupMenu menu = ActionManager.getInstance().createActionPopupMenu(ActionPlaces.UNKNOWN, group); return menu.getComponent(); } static String renderSeverity(HighlightSeverity severity) { return StringUtil.capitalizeWords(severity.toString().toLowerCase(), true); } private void toggleToolNode(final InspectionConfigTreeNode toolNode) { final Descriptor descriptor = toolNode.getDescriptor(); Project project = myProjectProfileManager.getProject(); if (descriptor!= null) { final HighlightDisplayKey key = descriptor.getKey(); final 
String toolShortName = key.toString(); if (toolNode.isChecked()) { if (toolNode.getScope(project) != null){ if (toolNode.isByDefault()) { mySelectedProfile.enableToolByDefault(toolShortName, project); } else { mySelectedProfile.enableTool(toolShortName, toolNode.getScope(project), project); } } else { mySelectedProfile.enableTool(toolShortName, project); } } else { if (toolNode.getScope(project) != null) { if (toolNode.isByDefault()) { mySelectedProfile.disableToolByDefault(toolShortName, project); } else { mySelectedProfile.disableTool(toolShortName, toolNode.getScope(project), project); } } else if (toolNode.getChildCount() == 0){ //default node and no scopes configured mySelectedProfile.disableTool(toolShortName, project); } } toolNode.dropCache(); updateUpHierarchy(toolNode, (InspectionConfigTreeNode)toolNode.getParent()); } final TreePath path = new TreePath(toolNode.getPath()); if (Comparing.equal(myTree.getSelectionPath(), path)) { updateOptionsAndDescriptionPanel(path); } } private static void updateUpHierarchy(final InspectionConfigTreeNode node, final InspectionConfigTreeNode parent) { if (parent != null) { parent.dropCache(); updateUpHierarchy(parent, (InspectionConfigTreeNode)parent.getParent()); } } private static boolean isDescriptorAccepted(Descriptor descriptor, @NonNls String filter, final boolean forceInclude, final List<Set<String>> keySetList, final Set<String> quoted) { filter = filter.toLowerCase(); if (StringUtil.containsIgnoreCase(descriptor.getText(), filter)) { return true; } final String[] groupPath = descriptor.getGroup(); for (String group : groupPath) { if (StringUtil.containsIgnoreCase(group, filter)) { return true; } } for (String stripped : quoted) { if (StringUtil.containsIgnoreCase(descriptor.getText(),stripped)) { return true; } for (String group : groupPath) { if (StringUtil.containsIgnoreCase(group,stripped)) { return true; } } final String description = descriptor.getToolWrapper().loadDescription(); if (description != null && 
StringUtil.containsIgnoreCase(description.toLowerCase(), stripped)) { if (!forceInclude) return true; } else if (forceInclude) return false; } for (Set<String> keySet : keySetList) { if (keySet.contains(descriptor.getKey().toString())) { if (!forceInclude) { return true; } } else { if (forceInclude) { return false; } } } return forceInclude; } private void fillTreeData(String filter, boolean forceInclude) { if (mySelectedProfile == null) return; myRoot.removeAllChildren(); myRoot.setChecked(false); myRoot.dropCache(); List<Set<String>> keySetList = new ArrayList<Set<String>>(); final Set<String> quoted = new HashSet<String>(); if (filter != null && !filter.isEmpty()) { keySetList.addAll(SearchUtil.findKeys(filter, quoted)); } Project project = myProjectProfileManager.getProject(); for (Descriptor descriptor : myDescriptors.keySet()) { if (filter != null && !filter.isEmpty() && !isDescriptorAccepted(descriptor, filter, forceInclude, keySetList, quoted)) { continue; } final List<ScopeToolState> nonDefaultTools = mySelectedProfile.getNonDefaultTools(descriptor.getKey().toString(), project); final HighlightDisplayKey key = descriptor.getKey(); final boolean enabled = mySelectedProfile.isToolEnabled(key); boolean hasNonDefaultScope = !nonDefaultTools.isEmpty(); final InspectionConfigTreeNode node = new InspectionConfigTreeNode(descriptor, null, !hasNonDefaultScope, enabled, !hasNonDefaultScope); getGroupNode(myRoot, descriptor.getGroup()).add(node); if (hasNonDefaultScope) { for (ScopeToolState nonDefaultState : nonDefaultTools) { node.add(new InspectionConfigTreeNode(new Descriptor(nonDefaultState, mySelectedProfile, project), nonDefaultState, false, false)); } node.add(new InspectionConfigTreeNode(descriptor, descriptor.getState(), true, false)); } myRoot.setEnabled(myRoot.isEnabled() || enabled); myRoot.dropCache(); } if (filter != null && forceInclude && myRoot.getChildCount() == 0) { final Set<String> filters = 
SearchableOptionsRegistrar.getInstance().getProcessedWords(filter); if (filters.size() > 1 || !quoted.isEmpty()) { fillTreeData(filter, false); } } TreeUtil.sort(myRoot, new InspectionsConfigTreeComparator()); } private void updateOptionsAndDescriptionPanel(TreePath path) { if (path == null) return; final InspectionConfigTreeNode node = (InspectionConfigTreeNode)path.getLastPathComponent(); final Descriptor descriptor = node.getDescriptor(); if (descriptor != null) { final String description = descriptor.loadDescription(); if (description != null) { // need this in order to correctly load plugin-supplied descriptions try { final HintHint hintHint = new HintHint(myBrowser, new Point(0, 0)); hintHint.setFont(myBrowser.getFont()); myBrowser.read(new StringReader(SearchUtil.markup(HintUtil.prepareHintText(description, hintHint), myProfileFilter.getFilter())), null); } catch (IOException e2) { try { //noinspection HardCodedStringLiteral myBrowser.read(new StringReader("<html><body><b>" + UNDER_CONSTRUCTION + "</b></body></html>"), null); } catch (IOException e1) { //Can't be } } } else { try { myBrowser.read(new StringReader(EMPTY_HTML), null); } catch (IOException e1) { //Can't be } } myOptionsPanel.removeAll(); final NamedScope scope = node.getScope(myProjectProfileManager.getProject()); if (scope != null || node.isInspectionNode()) { final HighlightDisplayKey key = descriptor.getKey(); final LevelChooser chooser = new LevelChooser(((SeverityProvider)mySelectedProfile.getProfileManager()).getOwnSeverityRegistrar()) { @Override public Dimension getPreferredSize() { Dimension preferredSize = super.getPreferredSize(); return new Dimension(Math.min(300, preferredSize.width), preferredSize.height); } @Override public Dimension getMinimumSize() { return getPreferredSize(); } }; chooser.getComboBox().addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { Project project = myProjectProfileManager.getProject(); boolean toUpdate = 
mySelectedProfile.getErrorLevel(key, scope, project) != chooser.getLevel(); mySelectedProfile.setErrorLevel(key, chooser.getLevel(), node.isInspectionNode() || node.isByDefault() ? -1 : node.getParent().getIndex(node), project); if (toUpdate) node.dropCache(); } }); chooser.setLevel(mySelectedProfile.getErrorLevel(key, scope, myProjectProfileManager.getProject())); final JPanel withSeverity = new JPanel(new GridBagLayout()); withSeverity.add(new JLabel(InspectionsBundle.message("inspection.severity")), new GridBagConstraints(0, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 10, 10), 0, 0)); withSeverity.add(chooser, new GridBagConstraints(1, 0, 1, 1, 1.0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 10, 0), 0, 0)); final JComponent comp = descriptor.getState().getAdditionalConfigPanel(); withSeverity.add(comp, new GridBagConstraints(0, 1, 2, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); myOptionsPanel.add(withSeverity, BorderLayout.CENTER); } myOptionsPanel.revalidate(); GuiUtils.enableChildren(myOptionsPanel, node.isChecked()); } else { initOptionsAndDescriptionPanel(); } myOptionsPanel.repaint(); } private void initOptionsAndDescriptionPanel() { myOptionsPanel.removeAll(); try { myBrowser.read(new StringReader(EMPTY_HTML), null); } catch (IOException e1) { //Can't be } myOptionsPanel.validate(); myOptionsPanel.repaint(); } private static InspectionConfigTreeNode getGroupNode(InspectionConfigTreeNode root, String[] groupPath) { InspectionConfigTreeNode currentRoot = root; for (final String group : groupPath) { currentRoot = getGroupNode(currentRoot, group); } return currentRoot; } private static InspectionConfigTreeNode getGroupNode(InspectionConfigTreeNode root, String group) { final int childCount = root.getChildCount(); for (int i = 0; i < childCount; i++) { InspectionConfigTreeNode child = (InspectionConfigTreeNode)root.getChildAt(i); if 
(group.equals(child.getUserObject())) {
      return child;
    }
  }
  // No existing group node — create and attach one.
  InspectionConfigTreeNode child = new InspectionConfigTreeNode(group, null, false, false);
  root.add(child);
  return child;
}

// Marks the selected profile as (un)modified; returns the value it was given.
public boolean setSelectedProfileModified(boolean modified) {
  mySelectedProfile.setModified(modified);
  return modified;
}

ModifiableModel getSelectedProfile() {
  return mySelectedProfile;
}

// Switches the panel to another profile model, remembers its name as the initial
// profile, and rebuilds descriptors plus the (possibly filtered) tree.
private void setSelectedProfile(final ModifiableModel modifiableModel) {
  if (mySelectedProfile == modifiableModel) return;
  mySelectedProfile = (InspectionProfileImpl)modifiableModel;
  if (mySelectedProfile != null) {
    myInitialProfile = mySelectedProfile.getName();
  }
  initDescriptors();
  filterTree(myProfileFilter != null ? myProfileFilter.getFilter() : null);
}

@Override
public Dimension getPreferredSize() {
  return new Dimension(700, 500);
}

// Tears the panel down: persists the splitter proportions, cancels pending repaint
// requests, disposes the filter, resets every tool's config panel, and finally
// nulls mySelectedProfile (which also signals the activity watcher to stop).
public void disposeUI() {
  if (myInspectionProfilePanel == null) {
    return;
  }
  myProperties.setFloat(VERTICAL_DIVIDER_PROPORTION, myMainSplitter.getProportion());
  myProperties.setFloat(HORIZONTAL_DIVIDER_PROPORTION, myRightSplitter.getProportion());
  myAlarm.cancelAllRequests();
  myProfileFilter.dispose();
  if (mySelectedProfile != null) {
    for (ScopeToolState state : mySelectedProfile.getAllTools(myProjectProfileManager.getProject())) {
      state.resetConfigPanel();
    }
  }
  mySelectedProfile = null;
}

// Assembles the full settings UI: description browser on the right, tree with
// toolbar and filter on the left, joined by two persisted splitters.
private JPanel createInspectionProfileSettingsPanel() {
  myBrowser = new JEditorPane(UIUtil.HTML_MIME, EMPTY_HTML);
  myBrowser.setEditable(false);
  myBrowser.setBorder(IdeBorderFactory.createEmptyBorder(5, 5, 5, 5));
  myBrowser.addHyperlinkListener(new BrowserHyperlinkListener());
  initDescriptors();
  fillTreeData(myProfileFilter != null ?
myProfileFilter.getFilter() : null, true); JPanel descriptionPanel = new JPanel(new BorderLayout()); descriptionPanel.setBorder(IdeBorderFactory.createTitledBorder(InspectionsBundle.message("inspection.description.title"), false, new Insets(13, 0, 0, 0))); descriptionPanel.add(ScrollPaneFactory.createScrollPane(myBrowser), BorderLayout.CENTER); myRightSplitter = new Splitter(true); myRightSplitter.setFirstComponent(descriptionPanel); myRightSplitter.setProportion(myProperties.getFloat(HORIZONTAL_DIVIDER_PROPORTION, 0.5f)); myOptionsPanel = new JPanel(new BorderLayout()); myOptionsPanel.setBorder(IdeBorderFactory.createTitledBorder("Options", false, new Insets(0, 0, 0, 0))); initOptionsAndDescriptionPanel(); myRightSplitter.setSecondComponent(myOptionsPanel); myRightSplitter.setHonorComponentsMinimumSize(true); final JPanel treePanel = new JPanel(new BorderLayout()); final JScrollPane tree = initTreeScrollPane(); treePanel.add(tree, BorderLayout.CENTER); final JPanel northPanel = new JPanel(new GridBagLayout()); northPanel.setBorder(IdeBorderFactory.createEmptyBorder(2, 0, 2, 0)); northPanel.add(createTreeToolbarPanel().getComponent(), new GridBagConstraints(0, 0, 1, 1, 0.5, 1, GridBagConstraints.BASELINE_LEADING, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0)); northPanel.add(myProfileFilter, new GridBagConstraints(1, 0, 1, 1, 1, 1, GridBagConstraints.BASELINE_TRAILING, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0)); treePanel.add(northPanel, BorderLayout.NORTH); myMainSplitter = new Splitter(false); myMainSplitter.setFirstComponent(treePanel); myMainSplitter.setSecondComponent(myRightSplitter); myMainSplitter.setHonorComponentsMinimumSize(false); myMainSplitter.setProportion(myProperties.getFloat(VERTICAL_DIVIDER_PROPORTION, 0.5f)); final JPanel panel = new JPanel(new BorderLayout()); panel.add(myMainSplitter, BorderLayout.CENTER); return panel; } public boolean isModified() { if (myModified) return true; if 
(mySelectedProfile.isChanged()) return true; if (myShareProfile != (mySelectedProfile.getProfileManager() == myProjectProfileManager)) return true; if (!Comparing.strEqual(myInitialProfile, mySelectedProfile.getName())) return true; if (descriptorsAreChanged()) { return true; } return false; } public void reset() { myModified = false; setSelectedProfile(mySelectedProfile); final String filter = myProfileFilter.getFilter(); myProfileFilter.reset(); myProfileFilter.setSelectedItem(filter); myShareProfile = mySelectedProfile.getProfileManager() == myProjectProfileManager; } public void apply() throws ConfigurationException { final boolean modified = isModified(); if (!modified) { return; } final ModifiableModel selectedProfile = getSelectedProfile(); final ProfileManager profileManager = myShareProfile ? myProjectProfileManager : InspectionProfileManager.getInstance(); selectedProfile.setLocal(!myShareProfile); if (selectedProfile.getProfileManager() != profileManager) { if (selectedProfile.getProfileManager().getProfile(selectedProfile.getName(), false) != null) { selectedProfile.getProfileManager().deleteProfile(selectedProfile.getName()); } copyUsedSeveritiesIfUndefined(selectedProfile, profileManager); selectedProfile.setProfileManager(profileManager); } final InspectionProfile parentProfile = selectedProfile.getParentProfile(); if (((InspectionProfileManagerImpl)InspectionProfileManager.getInstance()).getSchemesManager().isShared(selectedProfile)) { if (descriptorsAreChanged()) { throw new ConfigurationException("Shared profile cannot be modified. 
Please do \"Save As...\" first."); } } try { selectedProfile.commit(); } catch (IOException e) { throw new ConfigurationException(e.getMessage()); } setSelectedProfile(parentProfile.getModifiableModel()); setSelectedProfileModified(false); myModified = false; } private static void copyUsedSeveritiesIfUndefined(final ModifiableModel selectedProfile, final ProfileManager profileManager) { final SeverityRegistrar registrar = ((SeverityProvider)profileManager).getSeverityRegistrar(); final Set<HighlightSeverity> severities = ((InspectionProfileImpl)selectedProfile).getUsedSeverities(); for (Iterator<HighlightSeverity> iterator = severities.iterator(); iterator.hasNext();) { HighlightSeverity severity = iterator.next(); if (registrar.isSeverityValid(severity.toString())) { iterator.remove(); } } if (!severities.isEmpty()) { final SeverityRegistrar oppositeRegister = ((SeverityProvider)selectedProfile.getProfileManager()).getSeverityRegistrar(); for (HighlightSeverity severity : severities) { final TextAttributesKey attributesKey = TextAttributesKey.find(severity.toString()); final TextAttributes textAttributes = oppositeRegister.getTextAttributesBySeverity(severity); LOG.assertTrue(textAttributes != null); HighlightInfoType.HighlightInfoTypeImpl info = new HighlightInfoType.HighlightInfoTypeImpl(severity, attributesKey); registrar.registerSeverity(new SeverityRegistrar.SeverityBasedTextAttributes(textAttributes.clone(), info), textAttributes.getErrorStripeColor()); } } } private boolean descriptorsAreChanged() { for (Map.Entry<Descriptor, List<Descriptor>> entry : myDescriptors.entrySet()) { Descriptor desc = entry.getKey(); Project project = myProjectProfileManager.getProject(); if (mySelectedProfile.isToolEnabled(desc.getKey(), (NamedScope)null, project) != desc.isEnabled()){ return true; } if (mySelectedProfile.getErrorLevel(desc.getKey(), desc.getScope(), project) != desc.getLevel()) { return true; } final List<Descriptor> descriptors = entry.getValue(); for 
(Descriptor descriptor : descriptors) { if (mySelectedProfile.isToolEnabled(descriptor.getKey(), descriptor.getScope(), project) != descriptor.isEnabled()) { return true; } if (mySelectedProfile.getErrorLevel(descriptor.getKey(), descriptor.getScope(), project) != descriptor.getLevel()) { return true; } } final List<ScopeToolState> tools = mySelectedProfile.getNonDefaultTools(desc.getKey().toString(), project); if (tools.size() != descriptors.size()) { return true; } for (int i = 0; i < tools.size(); i++) { final ScopeToolState pair = tools.get(i); if (!Comparing.equal(pair.getScope(project), descriptors.get(i).getScope())) { return true; } } } return false; } public Tree getTree() { return myTree; } public boolean isProfileShared() { return myShareProfile; } public void setProfileShared(boolean profileShared) { myShareProfile = profileShared; } @Override public void setVisible(boolean aFlag) { if (aFlag && myInspectionProfilePanel == null) { initUI(); } super.setVisible(aFlag); } private void setNewHighlightingLevel(@NotNull HighlightDisplayLevel level) { final int[] rows = myTree.getSelectionRows(); final boolean showOptionsAndDescriptorPanels = rows != null && rows.length == 1; for (int i = 0; rows != null && i < rows.length; i++) { final InspectionConfigTreeNode node = (InspectionConfigTreeNode)myTree.getPathForRow(rows[i]).getLastPathComponent(); final InspectionConfigTreeNode parent = (InspectionConfigTreeNode)node.getParent(); final Object userObject = node.getUserObject(); if (userObject instanceof Descriptor && (node.getScopeName() != null || node.isLeaf())) { updateErrorLevel(node, showOptionsAndDescriptorPanels, level); updateUpHierarchy(node, parent); } else { updateErrorLevelUpInHierarchy(level, showOptionsAndDescriptorPanels, node); updateUpHierarchy(node, parent); } } if (rows != null && rows.length == 1) { updateOptionsAndDescriptionPanel(myTree.getPathForRow(rows[0])); } else { initOptionsAndDescriptionPanel(); } repaintTableData(); } private void 
updateErrorLevelUpInHierarchy(@NotNull HighlightDisplayLevel level, boolean showOptionsAndDescriptorPanels, InspectionConfigTreeNode node) { node.dropCache(); for (int j = 0; j < node.getChildCount(); j++) { final InspectionConfigTreeNode child = (InspectionConfigTreeNode)node.getChildAt(j); final Object userObject = child.getUserObject(); if (userObject instanceof Descriptor && (child.getScopeName() != null || child.isLeaf())) { updateErrorLevel(child, showOptionsAndDescriptorPanels, level); } else { updateErrorLevelUpInHierarchy(level, showOptionsAndDescriptorPanels, child); } } } private void updateErrorLevel(final InspectionConfigTreeNode child, final boolean showOptionsAndDescriptorPanels, @NotNull HighlightDisplayLevel level) { final HighlightDisplayKey key = child.getDescriptor().getKey(); mySelectedProfile.setErrorLevel(key, level, child.isInspectionNode() || child.isByDefault() ? -1 : child.getParent().getIndex(child), myProjectProfileManager.getProject()); child.dropCache(); if (showOptionsAndDescriptorPanels) { updateOptionsAndDescriptionPanel(new TreePath(child.getPath())); } } private class MyFilterComponent extends FilterComponent { private MyFilterComponent() { super(INSPECTION_FILTER_HISTORY, 10); setHistory(Arrays.asList("\"New in 13\"")); } @Override public void filter() { filterTree(getFilter()); } @Override protected void onlineFilter() { if (mySelectedProfile == null) return; final String filter = getFilter(); getExpandedNodes(mySelectedProfile).saveVisibleState(myTree); fillTreeData(filter, true); reloadModel(); if (filter == null || filter.isEmpty()) { restoreTreeState(); } else { TreeUtil.expandAll(myTree); } } } private class MyAddScopeAction extends AddScopeAction { public MyAddScopeAction() { super(SingleInspectionProfilePanel.this.myTree); } @Override protected InspectionProfileImpl getSelectedProfile() { return mySelectedProfile; } @Override public void actionPerformed(AnActionEvent e) { super.actionPerformed(e); final TreePath[] paths 
= myTree.getSelectionPaths(); if (paths != null && paths.length == 1) { updateOptionsAndDescriptionPanel(myTree.getSelectionPath()); } else { initOptionsAndDescriptionPanel(); } } } private class MyDeleteScopeAction extends DeleteScopeAction { public MyDeleteScopeAction() { super(SingleInspectionProfilePanel.this.myTree); } @Override protected InspectionProfileImpl getSelectedProfile() { return mySelectedProfile; } } }
platform/lang-impl/src/com/intellij/profile/codeInspection/ui/SingleInspectionProfilePanel.java
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.profile.codeInspection.ui; import com.intellij.CommonBundle; import com.intellij.codeHighlighting.HighlightDisplayLevel; import com.intellij.codeInsight.daemon.HighlightDisplayKey; import com.intellij.codeInsight.daemon.impl.HighlightInfoType; import com.intellij.codeInsight.daemon.impl.SeverityRegistrar; import com.intellij.codeInsight.daemon.impl.SeverityUtil; import com.intellij.codeInsight.hint.HintUtil; import com.intellij.codeInspection.InspectionProfile; import com.intellij.codeInspection.InspectionsBundle; import com.intellij.codeInspection.ModifiableModel; import com.intellij.codeInspection.ex.*; import com.intellij.icons.AllIcons; import com.intellij.ide.CommonActionsManager; import com.intellij.ide.DefaultTreeExpander; import com.intellij.ide.TreeExpander; import com.intellij.ide.ui.search.SearchUtil; import com.intellij.ide.ui.search.SearchableOptionsRegistrar; import com.intellij.lang.annotation.HighlightSeverity; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.colors.TextAttributesKey; import com.intellij.openapi.editor.markup.TextAttributes; import com.intellij.openapi.options.ConfigurationException; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.ui.Splitter; import com.intellij.openapi.util.Comparing; import 
com.intellij.openapi.util.JDOMUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.profile.ApplicationProfileManager; import com.intellij.profile.DefaultProjectProfileManager; import com.intellij.profile.ProfileManager; import com.intellij.profile.codeInspection.InspectionProfileManager; import com.intellij.profile.codeInspection.InspectionProfileManagerImpl; import com.intellij.profile.codeInspection.InspectionProjectProfileManager; import com.intellij.profile.codeInspection.SeverityProvider; import com.intellij.profile.codeInspection.ui.actions.AddScopeAction; import com.intellij.profile.codeInspection.ui.actions.DeleteScopeAction; import com.intellij.profile.codeInspection.ui.actions.MoveScopeAction; import com.intellij.psi.search.scope.packageSet.NamedScope; import com.intellij.ui.*; import com.intellij.ui.treeStructure.Tree; import com.intellij.util.Alarm; import com.intellij.util.IconUtil; import com.intellij.util.config.StorageAccessors; import com.intellij.util.containers.Convertor; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.tree.TreeUtil; import org.jdom.Element; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.event.TreeExpansionEvent; import javax.swing.event.TreeExpansionListener; import javax.swing.event.TreeSelectionEvent; import javax.swing.event.TreeSelectionListener; import javax.swing.tree.DefaultTreeModel; import javax.swing.tree.DefaultTreeSelectionModel; import javax.swing.tree.TreeNode; import javax.swing.tree.TreePath; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.io.IOException; import java.io.StringReader; import java.util.*; import java.util.List; /** * User: anna * Date: 31-May-2006 */ public class SingleInspectionProfilePanel extends JPanel { private static 
final Logger LOG = Logger.getInstance("#com.intellij.codeInspection.ex.InspectionToolsPanel"); @NonNls private static final String INSPECTION_FILTER_HISTORY = "INSPECTION_FILTER_HISTORY"; private static final String UNDER_CONSTRUCTION = InspectionsBundle.message("inspection.tool.description.under.construction.text"); private final Map<Descriptor, List<Descriptor>> myDescriptors = new HashMap<Descriptor, List<Descriptor>>(); private InspectionProfileImpl mySelectedProfile; private JEditorPane myBrowser; private JPanel myOptionsPanel; private JPanel myInspectionProfilePanel = null; private FilterComponent myProfileFilter; private final InspectionConfigTreeNode myRoot = new InspectionConfigTreeNode(InspectionsBundle.message("inspection.root.node.title"), null, false, false); private final Alarm myAlarm = new Alarm(); private boolean myModified = false; private Tree myTree; private TreeExpander myTreeExpander; @NotNull private String myInitialProfile; @NonNls private static final String EMPTY_HTML = "<html><body></body></html>"; private boolean myIsInRestore = false; @NonNls private static final String VERTICAL_DIVIDER_PROPORTION = "VERTICAL_DIVIDER_PROPORTION"; @NonNls private static final String HORIZONTAL_DIVIDER_PROPORTION = "HORIZONTAL_DIVIDER_PROPORTION"; private final StorageAccessors myProperties = StorageAccessors.createGlobal("SingleInspectionProfilePanel"); private boolean myShareProfile; private final InspectionProjectProfileManager myProjectProfileManager; private Splitter myRightSplitter; private Splitter myMainSplitter; public SingleInspectionProfilePanel(@NotNull InspectionProjectProfileManager projectProfileManager, @NotNull String inspectionProfileName, @NotNull ModifiableModel profile) { super(new BorderLayout()); myProjectProfileManager = projectProfileManager; mySelectedProfile = (InspectionProfileImpl)profile; myInitialProfile = inspectionProfileName; myShareProfile = profile.getProfileManager() == projectProfileManager; } private static 
VisibleTreeState getExpandedNodes(InspectionProfileImpl profile) { if (profile.getProfileManager() instanceof ApplicationProfileManager) { return AppInspectionProfilesVisibleTreeState.getInstance().getVisibleTreeState(profile); } else { DefaultProjectProfileManager projectProfileManager = (DefaultProjectProfileManager)profile.getProfileManager(); return ProjectInspectionProfilesVisibleTreeState.getInstance(projectProfileManager.getProject()).getVisibleTreeState(profile); } } private void initUI() { myInspectionProfilePanel = createInspectionProfileSettingsPanel(); add(myInspectionProfilePanel, BorderLayout.CENTER); UserActivityWatcher userActivityWatcher = new UserActivityWatcher(); userActivityWatcher.addUserActivityListener(new UserActivityListener() { @Override public void stateChanged() { //invoke after all other listeners SwingUtilities.invokeLater(new Runnable() { @Override public void run() { if (mySelectedProfile == null) return; //panel was disposed updateProperSettingsForSelection(); wereToolSettingsModified(); } }); } }); userActivityWatcher.register(myOptionsPanel); updateSelectedProfileState(); reset(); } private void updateSelectedProfileState() { if (mySelectedProfile == null) return; restoreTreeState(); repaintTableData(); updateSelection(); } public void updateSelection() { if (myTree != null) { final TreePath selectionPath = myTree.getSelectionPath(); if (selectionPath != null) { TreeUtil.selectNode(myTree, (TreeNode)selectionPath.getLastPathComponent()); TreeUtil.showRowCentered(myTree, myTree.getRowForPath(selectionPath), false); } } } private void wereToolSettingsModified() { for (Map.Entry<Descriptor, List<Descriptor>> entry : myDescriptors.entrySet()) { Descriptor desc = entry.getKey(); if (wereToolSettingsModified(desc)) return; List<Descriptor> descriptors = entry.getValue(); for (Descriptor descriptor : descriptors) { if (wereToolSettingsModified(descriptor)) return; } } myModified = false; } private boolean 
wereToolSettingsModified(Descriptor descriptor) { InspectionToolWrapper toolWrapper = descriptor.getToolWrapper(); if (toolWrapper == null || !mySelectedProfile.isToolEnabled(descriptor.getKey())) { return false; } Element oldConfig = descriptor.getConfig(); if (oldConfig == null) return false; Element newConfig = Descriptor.createConfigElement(toolWrapper); if (!JDOMUtil.areElementsEqual(oldConfig, newConfig)) { myAlarm.cancelAllRequests(); myAlarm.addRequest(new Runnable() { @Override public void run() { myTree.repaint(); } }, 300); myModified = true; return true; } return false; } private void updateProperSettingsForSelection() { final TreePath selectionPath = myTree.getSelectionPath(); if (selectionPath != null) { InspectionConfigTreeNode node = (InspectionConfigTreeNode)selectionPath.getLastPathComponent(); final Descriptor descriptor = node.getDescriptor(); if (descriptor != null) { final boolean properSetting = mySelectedProfile.isProperSetting(descriptor.getKey().toString()); if (node.isProperSetting() != properSetting) { myAlarm.cancelAllRequests(); myAlarm.addRequest(new Runnable() { @Override public void run() { myTree.repaint(); } }, 300); node.dropCache(); updateUpHierarchy(node, (InspectionConfigTreeNode)node.getParent()); } } } } private void initDescriptors() { final InspectionProfileImpl profile = mySelectedProfile; if (profile == null) return; myDescriptors.clear(); List<ScopeToolState> tools = profile.getDefaultStates(myProjectProfileManager.getProject()); for (ScopeToolState state : tools) { final ArrayList<Descriptor> descriptors = new ArrayList<Descriptor>(); if (state.getLevel() == HighlightDisplayLevel.NON_SWITCHABLE_ERROR) { continue; } Project project = myProjectProfileManager.getProject(); myDescriptors.put(new Descriptor(state, profile, project), descriptors); InspectionToolWrapper toolWrapper = state.getTool(); final List<ScopeToolState> nonDefaultTools = profile.getNonDefaultTools(toolWrapper.getShortName(), project); for 
(ScopeToolState nonDefaultToolState : nonDefaultTools) { descriptors.add(new Descriptor(nonDefaultToolState, profile, project)); } } } private void postProcessModification() { wereToolSettingsModified(); //resetup configs for (ScopeToolState state : mySelectedProfile.getAllTools(myProjectProfileManager.getProject())) { state.resetConfigPanel(); } fillTreeData(myProfileFilter.getFilter(), true); repaintTableData(); updateOptionsAndDescriptionPanel(myTree.getSelectionPath()); } @Nullable public static ModifiableModel createNewProfile(final int initValue, ModifiableModel selectedProfile, JPanel parent, String profileName, Set<String> existingProfileNames, @NotNull Project project) { profileName = Messages.showInputDialog(parent, profileName, "Create New Inspection Profile", Messages.getQuestionIcon()); if (profileName == null) return null; final ProfileManager profileManager = selectedProfile.getProfileManager(); if (existingProfileNames.contains(profileName)) { Messages.showErrorDialog(InspectionsBundle.message("inspection.unable.to.create.profile.message", profileName), InspectionsBundle.message("inspection.unable.to.create.profile.dialog.title")); return null; } InspectionProfileImpl inspectionProfile = new InspectionProfileImpl(profileName, InspectionToolRegistrar.getInstance(), profileManager); if (initValue == -1) { inspectionProfile.initInspectionTools(project); ModifiableModel profileModifiableModel = inspectionProfile.getModifiableModel(); final InspectionToolWrapper[] profileEntries = profileModifiableModel.getInspectionTools(null); for (InspectionToolWrapper toolWrapper : profileEntries) { profileModifiableModel.disableTool(toolWrapper.getShortName(), (NamedScope)null, project); } profileModifiableModel.setLocal(true); profileModifiableModel.setModified(true); return profileModifiableModel; } else if (initValue == 0) { inspectionProfile.copyFrom(selectedProfile); inspectionProfile.setName(profileName); inspectionProfile.initInspectionTools(project); 
inspectionProfile.setModified(true); return inspectionProfile; } return null; } public void setFilter(String filter) { myProfileFilter.setFilter(filter); } public void filterTree(String filter) { if (myTree != null) { getExpandedNodes(mySelectedProfile).saveVisibleState(myTree); fillTreeData(filter, true); reloadModel(); restoreTreeState(); if (myTree.getSelectionPath() == null) { TreeUtil.selectFirstNode(myTree); } } } private void reloadModel() { try { myIsInRestore = true; ((DefaultTreeModel)myTree.getModel()).reload(); } finally { myIsInRestore = false; } } private void restoreTreeState() { try { myIsInRestore = true; getExpandedNodes(mySelectedProfile).restoreVisibleState(myTree); } finally { myIsInRestore = false; } } private ActionToolbar createTreeToolbarPanel() { final CommonActionsManager actionManager = CommonActionsManager.getInstance(); DefaultActionGroup actions = new DefaultActionGroup(); actions.add(actionManager.createExpandAllAction(myTreeExpander, myTree)); actions.add(actionManager.createCollapseAllAction(myTreeExpander, myTree)); actions.add(new AnAction(CommonBundle.message("button.reset.to.default"), CommonBundle.message("button.reset.to.default"), AllIcons.General.Reset) { { registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_R, InputEvent.CTRL_MASK)), myTree); } @Override public void update(AnActionEvent e) { e.getPresentation().setEnabled(myRoot.isProperSetting()); } @Override public void actionPerformed(AnActionEvent e) { mySelectedProfile.resetToBase(myProjectProfileManager.getProject()); postProcessModification(); } }); actions.add(new AnAction("Reset to Empty", "Reset to empty", AllIcons.Actions.Reset_to_empty){ @Override public void update(AnActionEvent e) { e.getPresentation().setEnabled(mySelectedProfile != null && mySelectedProfile.isExecutable(myProjectProfileManager.getProject())); } @Override public void actionPerformed(AnActionEvent e) { mySelectedProfile.resetToEmpty(e.getProject()); 
postProcessModification(); } }); actions.add(new ToggleAction("Lock Profile", "Lock profile", AllIcons.Nodes.Padlock) { @Override public boolean isSelected(AnActionEvent e) { return mySelectedProfile != null && mySelectedProfile.isProfileLocked(); } @Override public void setSelected(AnActionEvent e, boolean state) { mySelectedProfile.lockProfile(state); } }); actions.addSeparator(); actions.add(new MyAddScopeAction()); actions.add(new MyDeleteScopeAction()); actions.add(new MoveScopeAction(myTree, "Move Scope Up", IconUtil.getMoveUpIcon(), -1) { @Override protected boolean isEnabledFor(int idx, InspectionConfigTreeNode parent) { return idx > 0; } @Override protected InspectionProfileImpl getSelectedProfile() { return mySelectedProfile; } }); actions.add(new MoveScopeAction(myTree, "Move Scope Down", IconUtil.getMoveDownIcon(), 1) { @Override protected boolean isEnabledFor(int idx, InspectionConfigTreeNode parent) { return idx < parent.getChildCount() - 2; } @Override protected InspectionProfileImpl getSelectedProfile() { return mySelectedProfile; } }); actions.addSeparator(); final ActionToolbar actionToolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, actions, true); actionToolbar.setTargetComponent(this); return actionToolbar; } private void repaintTableData() { if (myTree != null) { getExpandedNodes(mySelectedProfile).saveVisibleState(myTree); reloadModel(); restoreTreeState(); } } public void selectInspectionTool(String name) { final InspectionConfigTreeNode node = findNodeByKey(name, myRoot); if (node != null) { TreeUtil.showRowCentered(myTree, myTree.getRowForPath(new TreePath(node.getPath())) - 1, true);//myTree.isRootVisible ? 
0 : 1; TreeUtil.selectNode(myTree, node); } } @Nullable private static InspectionConfigTreeNode findNodeByKey(String name, InspectionConfigTreeNode root) { for (int i = 0; i < root.getChildCount(); i++) { final InspectionConfigTreeNode child = (InspectionConfigTreeNode)root.getChildAt(i); final Descriptor descriptor = child.getDescriptor(); if (descriptor != null) { if (descriptor.getKey().toString().equals(name)) { return child; } } else { final InspectionConfigTreeNode node = findNodeByKey(name, child); if (node != null) return node; } } return null; } private JScrollPane initTreeScrollPane() { fillTreeData(null, true); final InspectionsConfigTreeRenderer renderer = new InspectionsConfigTreeRenderer(myProjectProfileManager.getProject()){ @Override protected String getFilter() { return myProfileFilter != null ? myProfileFilter.getFilter() : null; } }; myTree = new CheckboxTree(renderer, myRoot) { @Override public Dimension getPreferredScrollableViewportSize() { Dimension size = super.getPreferredScrollableViewportSize(); size = new Dimension(size.width + 10, size.height); return size; } @Override protected void onNodeStateChanged(final CheckedTreeNode node) { toggleToolNode((InspectionConfigTreeNode)node); } }; myTree.setCellRenderer(renderer); myTree.setRootVisible(false); myTree.setShowsRootHandles(true); UIUtil.setLineStyleAngled(myTree); TreeUtil.installActions(myTree); myTree.addTreeSelectionListener(new TreeSelectionListener() { @Override public void valueChanged(TreeSelectionEvent e) { if (myTree.getSelectionPaths() != null && myTree.getSelectionPaths().length == 1) { updateOptionsAndDescriptionPanel(myTree.getSelectionPaths()[0]); } else { initOptionsAndDescriptionPanel(); } if (!myIsInRestore) { InspectionProfileImpl selected = mySelectedProfile; if (selected != null) { InspectionProfileImpl baseProfile = (InspectionProfileImpl)selected.getParentProfile(); if (baseProfile != null) { 
getExpandedNodes(baseProfile).setSelectionPaths(myTree.getSelectionPaths()); } getExpandedNodes(selected).setSelectionPaths(myTree.getSelectionPaths()); } } } }); myTree.addMouseListener(new PopupHandler() { @Override public void invokePopup(Component comp, int x, int y) { final int[] selectionRows = myTree.getSelectionRows(); if (selectionRows != null && myTree.getPathForLocation(x, y) != null && Arrays.binarySearch(selectionRows, myTree.getRowForLocation(x, y)) > -1) { compoundPopup().show(comp, x, y); } } }); new TreeSpeedSearch(myTree, new Convertor<TreePath, String>() { @Override public String convert(TreePath o) { final InspectionConfigTreeNode node = (InspectionConfigTreeNode)o.getLastPathComponent(); final Descriptor descriptor = node.getDescriptor(); return descriptor != null ? InspectionsConfigTreeComparator.getDisplayTextToSort(descriptor.getText()) : InspectionsConfigTreeComparator .getDisplayTextToSort(node.getGroupName()); } }); myTree.setSelectionModel(new DefaultTreeSelectionModel()); final JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(myTree); scrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED); TreeUtil.collapseAll(myTree, 1); myTree.addTreeExpansionListener(new TreeExpansionListener() { @Override public void treeCollapsed(TreeExpansionEvent event) { InspectionProfileImpl selected = mySelectedProfile; final InspectionConfigTreeNode node = (InspectionConfigTreeNode)event.getPath().getLastPathComponent(); final InspectionProfileImpl parentProfile = (InspectionProfileImpl)selected.getParentProfile(); if (parentProfile != null) { getExpandedNodes(parentProfile).saveVisibleState(myTree); } getExpandedNodes(selected).saveVisibleState(myTree); } @Override public void treeExpanded(TreeExpansionEvent event) { InspectionProfileImpl selected = mySelectedProfile; if (selected != null) { final InspectionConfigTreeNode node = (InspectionConfigTreeNode)event.getPath().getLastPathComponent(); final 
InspectionProfileImpl parentProfile = (InspectionProfileImpl)selected.getParentProfile(); if (parentProfile != null) { getExpandedNodes(parentProfile).expandNode(node); } getExpandedNodes(selected).expandNode(node); } } }); myTreeExpander = new DefaultTreeExpander(myTree); myProfileFilter = new MyFilterComponent(); return scrollPane; } private JPopupMenu compoundPopup() { final DefaultActionGroup group = new DefaultActionGroup(); final SeverityRegistrar severityRegistrar = ((SeverityProvider)mySelectedProfile.getProfileManager()).getOwnSeverityRegistrar(); TreeSet<HighlightSeverity> severities = new TreeSet<HighlightSeverity>(severityRegistrar); severities.add(HighlightSeverity.ERROR); severities.add(HighlightSeverity.WARNING); severities.add(HighlightSeverity.WEAK_WARNING); final Collection<SeverityRegistrar.SeverityBasedTextAttributes> infoTypes = SeverityUtil.getRegisteredHighlightingInfoTypes(severityRegistrar); for (SeverityRegistrar.SeverityBasedTextAttributes info : infoTypes) { severities.add(info.getSeverity()); } for (HighlightSeverity severity : severities) { final HighlightDisplayLevel level = HighlightDisplayLevel.find(severity); group.add(new AnAction(renderSeverity(severity), renderSeverity(severity), level.getIcon()) { @Override public void actionPerformed(AnActionEvent e) { setNewHighlightingLevel(level); } }); } group.add(Separator.getInstance()); group.add(new MyAddScopeAction()); group.add(new MyDeleteScopeAction()); ActionPopupMenu menu = ActionManager.getInstance().createActionPopupMenu(ActionPlaces.UNKNOWN, group); return menu.getComponent(); } static String renderSeverity(HighlightSeverity severity) { return StringUtil.capitalizeWords(severity.toString().toLowerCase(), true); } private void toggleToolNode(final InspectionConfigTreeNode toolNode) { final Descriptor descriptor = toolNode.getDescriptor(); Project project = myProjectProfileManager.getProject(); if (descriptor!= null) { final HighlightDisplayKey key = descriptor.getKey(); final 
String toolShortName = key.toString(); if (toolNode.isChecked()) { if (toolNode.getScope(project) != null){ if (toolNode.isByDefault()) { mySelectedProfile.enableToolByDefault(toolShortName, project); } else { mySelectedProfile.enableTool(toolShortName, toolNode.getScope(project), project); } } else { mySelectedProfile.enableTool(toolShortName, project); } } else { if (toolNode.getScope(project) != null) { if (toolNode.isByDefault()) { mySelectedProfile.disableToolByDefault(toolShortName, project); } else { mySelectedProfile.disableTool(toolShortName, toolNode.getScope(project), project); } } else if (toolNode.getChildCount() == 0){ //default node and no scopes configured mySelectedProfile.disableTool(toolShortName, project); } } toolNode.dropCache(); updateUpHierarchy(toolNode, (InspectionConfigTreeNode)toolNode.getParent()); } final TreePath path = new TreePath(toolNode.getPath()); if (Comparing.equal(myTree.getSelectionPath(), path)) { updateOptionsAndDescriptionPanel(path); } } private static void updateUpHierarchy(final InspectionConfigTreeNode node, final InspectionConfigTreeNode parent) { if (parent != null) { parent.dropCache(); updateUpHierarchy(parent, (InspectionConfigTreeNode)parent.getParent()); } } private static boolean isDescriptorAccepted(Descriptor descriptor, @NonNls String filter, final boolean forceInclude, final List<Set<String>> keySetList, final Set<String> quoted) { filter = filter.toLowerCase(); if (StringUtil.containsIgnoreCase(descriptor.getText(), filter)) { return true; } final String[] groupPath = descriptor.getGroup(); for (String group : groupPath) { if (StringUtil.containsIgnoreCase(group, filter)) { return true; } } for (String stripped : quoted) { if (StringUtil.containsIgnoreCase(descriptor.getText(),stripped)) { return true; } for (String group : groupPath) { if (StringUtil.containsIgnoreCase(group,stripped)) { return true; } } final String description = descriptor.getToolWrapper().loadDescription(); if (description != null && 
StringUtil.containsIgnoreCase(description.toLowerCase(), stripped)) { if (!forceInclude) return true; } else if (forceInclude) return false; } for (Set<String> keySet : keySetList) { if (keySet.contains(descriptor.getKey().toString())) { if (!forceInclude) { return true; } } else { if (forceInclude) { return false; } } } return forceInclude; } private void fillTreeData(String filter, boolean forceInclude) { if (mySelectedProfile == null) return; myRoot.removeAllChildren(); myRoot.setChecked(false); myRoot.dropCache(); List<Set<String>> keySetList = new ArrayList<Set<String>>(); final Set<String> quated = new HashSet<String>(); if (filter != null && !filter.isEmpty()) { keySetList.addAll(SearchUtil.findKeys(filter, quated)); } Project project = myProjectProfileManager.getProject(); for (Descriptor descriptor : myDescriptors.keySet()) { if (filter != null && !filter.isEmpty() && !isDescriptorAccepted(descriptor, filter, forceInclude, keySetList, quated)) { continue; } final List<ScopeToolState> nonDefaultTools = mySelectedProfile.getNonDefaultTools(descriptor.getKey().toString(), project); final HighlightDisplayKey key = descriptor.getKey(); final boolean enabled = mySelectedProfile.isToolEnabled(key); boolean hasNonDefaultScope = !nonDefaultTools.isEmpty(); final InspectionConfigTreeNode node = new InspectionConfigTreeNode(descriptor, null, !hasNonDefaultScope, enabled, !hasNonDefaultScope); getGroupNode(myRoot, descriptor.getGroup()).add(node); if (hasNonDefaultScope) { for (ScopeToolState nonDefaultState : nonDefaultTools) { node.add(new InspectionConfigTreeNode(new Descriptor(nonDefaultState, mySelectedProfile, project), nonDefaultState, false, false)); } node.add(new InspectionConfigTreeNode(descriptor, descriptor.getState(), true, false)); } myRoot.setEnabled(myRoot.isEnabled() || enabled); myRoot.dropCache(); } if (filter != null && forceInclude && myRoot.getChildCount() == 0) { final Set<String> filters = 
SearchableOptionsRegistrar.getInstance().getProcessedWords(filter); if (filters.size() > 1) { fillTreeData(filter, false); } } TreeUtil.sort(myRoot, new InspectionsConfigTreeComparator()); } private void updateOptionsAndDescriptionPanel(TreePath path) { if (path == null) return; final InspectionConfigTreeNode node = (InspectionConfigTreeNode)path.getLastPathComponent(); final Descriptor descriptor = node.getDescriptor(); if (descriptor != null) { final String description = descriptor.loadDescription(); if (description != null) { // need this in order to correctly load plugin-supplied descriptions try { final HintHint hintHint = new HintHint(myBrowser, new Point(0, 0)); hintHint.setFont(myBrowser.getFont()); myBrowser.read(new StringReader(SearchUtil.markup(HintUtil.prepareHintText(description, hintHint), myProfileFilter.getFilter())), null); } catch (IOException e2) { try { //noinspection HardCodedStringLiteral myBrowser.read(new StringReader("<html><body><b>" + UNDER_CONSTRUCTION + "</b></body></html>"), null); } catch (IOException e1) { //Can't be } } } else { try { myBrowser.read(new StringReader(EMPTY_HTML), null); } catch (IOException e1) { //Can't be } } myOptionsPanel.removeAll(); final NamedScope scope = node.getScope(myProjectProfileManager.getProject()); if (scope != null || node.isInspectionNode()) { final HighlightDisplayKey key = descriptor.getKey(); final LevelChooser chooser = new LevelChooser(((SeverityProvider)mySelectedProfile.getProfileManager()).getOwnSeverityRegistrar()) { @Override public Dimension getPreferredSize() { Dimension preferredSize = super.getPreferredSize(); return new Dimension(Math.min(300, preferredSize.width), preferredSize.height); } @Override public Dimension getMinimumSize() { return getPreferredSize(); } }; chooser.getComboBox().addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { Project project = myProjectProfileManager.getProject(); boolean toUpdate = 
mySelectedProfile.getErrorLevel(key, scope, project) != chooser.getLevel(); mySelectedProfile.setErrorLevel(key, chooser.getLevel(), node.isInspectionNode() || node.isByDefault() ? -1 : node.getParent().getIndex(node), project); if (toUpdate) node.dropCache(); } }); chooser.setLevel(mySelectedProfile.getErrorLevel(key, scope, myProjectProfileManager.getProject())); final JPanel withSeverity = new JPanel(new GridBagLayout()); withSeverity.add(new JLabel(InspectionsBundle.message("inspection.severity")), new GridBagConstraints(0, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 10, 10), 0, 0)); withSeverity.add(chooser, new GridBagConstraints(1, 0, 1, 1, 1.0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 10, 0), 0, 0)); final JComponent comp = descriptor.getState().getAdditionalConfigPanel(); withSeverity.add(comp, new GridBagConstraints(0, 1, 2, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); myOptionsPanel.add(withSeverity, BorderLayout.CENTER); } myOptionsPanel.revalidate(); GuiUtils.enableChildren(myOptionsPanel, node.isChecked()); } else { initOptionsAndDescriptionPanel(); } myOptionsPanel.repaint(); } private void initOptionsAndDescriptionPanel() { myOptionsPanel.removeAll(); try { myBrowser.read(new StringReader(EMPTY_HTML), null); } catch (IOException e1) { //Can't be } myOptionsPanel.validate(); myOptionsPanel.repaint(); } private static InspectionConfigTreeNode getGroupNode(InspectionConfigTreeNode root, String[] groupPath) { InspectionConfigTreeNode currentRoot = root; for (final String group : groupPath) { currentRoot = getGroupNode(currentRoot, group); } return currentRoot; } private static InspectionConfigTreeNode getGroupNode(InspectionConfigTreeNode root, String group) { final int childCount = root.getChildCount(); for (int i = 0; i < childCount; i++) { InspectionConfigTreeNode child = (InspectionConfigTreeNode)root.getChildAt(i); if 
(group.equals(child.getUserObject())) { return child; } } InspectionConfigTreeNode child = new InspectionConfigTreeNode(group, null, false, false); root.add(child); return child; } public boolean setSelectedProfileModified(boolean modified) { mySelectedProfile.setModified(modified); return modified; } ModifiableModel getSelectedProfile() { return mySelectedProfile; } private void setSelectedProfile(final ModifiableModel modifiableModel) { if (mySelectedProfile == modifiableModel) return; mySelectedProfile = (InspectionProfileImpl)modifiableModel; if (mySelectedProfile != null) { myInitialProfile = mySelectedProfile.getName(); } initDescriptors(); filterTree(myProfileFilter != null ? myProfileFilter.getFilter() : null); } @Override public Dimension getPreferredSize() { return new Dimension(700, 500); } public void disposeUI() { if (myInspectionProfilePanel == null) { return; } myProperties.setFloat(VERTICAL_DIVIDER_PROPORTION, myMainSplitter.getProportion()); myProperties.setFloat(HORIZONTAL_DIVIDER_PROPORTION, myRightSplitter.getProportion()); myAlarm.cancelAllRequests(); myProfileFilter.dispose(); if (mySelectedProfile != null) { for (ScopeToolState state : mySelectedProfile.getAllTools(myProjectProfileManager.getProject())) { state.resetConfigPanel(); } } mySelectedProfile = null; } private JPanel createInspectionProfileSettingsPanel() { myBrowser = new JEditorPane(UIUtil.HTML_MIME, EMPTY_HTML); myBrowser.setEditable(false); myBrowser.setBorder(IdeBorderFactory.createEmptyBorder(5, 5, 5, 5)); myBrowser.addHyperlinkListener(new BrowserHyperlinkListener()); initDescriptors(); fillTreeData(myProfileFilter != null ? 
myProfileFilter.getFilter() : null, true); JPanel descriptionPanel = new JPanel(new BorderLayout()); descriptionPanel.setBorder(IdeBorderFactory.createTitledBorder(InspectionsBundle.message("inspection.description.title"), false, new Insets(13, 0, 0, 0))); descriptionPanel.add(ScrollPaneFactory.createScrollPane(myBrowser), BorderLayout.CENTER); myRightSplitter = new Splitter(true); myRightSplitter.setFirstComponent(descriptionPanel); myRightSplitter.setProportion(myProperties.getFloat(HORIZONTAL_DIVIDER_PROPORTION, 0.5f)); myOptionsPanel = new JPanel(new BorderLayout()); myOptionsPanel.setBorder(IdeBorderFactory.createTitledBorder("Options", false, new Insets(0, 0, 0, 0))); initOptionsAndDescriptionPanel(); myRightSplitter.setSecondComponent(myOptionsPanel); myRightSplitter.setHonorComponentsMinimumSize(true); final JPanel treePanel = new JPanel(new BorderLayout()); final JScrollPane tree = initTreeScrollPane(); treePanel.add(tree, BorderLayout.CENTER); final JPanel northPanel = new JPanel(new GridBagLayout()); northPanel.setBorder(IdeBorderFactory.createEmptyBorder(2, 0, 2, 0)); northPanel.add(createTreeToolbarPanel().getComponent(), new GridBagConstraints(0, 0, 1, 1, 0.5, 1, GridBagConstraints.BASELINE_LEADING, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0)); northPanel.add(myProfileFilter, new GridBagConstraints(1, 0, 1, 1, 1, 1, GridBagConstraints.BASELINE_TRAILING, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0)); treePanel.add(northPanel, BorderLayout.NORTH); myMainSplitter = new Splitter(false); myMainSplitter.setFirstComponent(treePanel); myMainSplitter.setSecondComponent(myRightSplitter); myMainSplitter.setHonorComponentsMinimumSize(false); myMainSplitter.setProportion(myProperties.getFloat(VERTICAL_DIVIDER_PROPORTION, 0.5f)); final JPanel panel = new JPanel(new BorderLayout()); panel.add(myMainSplitter, BorderLayout.CENTER); return panel; } public boolean isModified() { if (myModified) return true; if 
(mySelectedProfile.isChanged()) return true; if (myShareProfile != (mySelectedProfile.getProfileManager() == myProjectProfileManager)) return true; if (!Comparing.strEqual(myInitialProfile, mySelectedProfile.getName())) return true; if (descriptorsAreChanged()) { return true; } return false; } public void reset() { myModified = false; setSelectedProfile(mySelectedProfile); final String filter = myProfileFilter.getFilter(); myProfileFilter.reset(); myProfileFilter.setSelectedItem(filter); myShareProfile = mySelectedProfile.getProfileManager() == myProjectProfileManager; } public void apply() throws ConfigurationException { final boolean modified = isModified(); if (!modified) { return; } final ModifiableModel selectedProfile = getSelectedProfile(); final ProfileManager profileManager = myShareProfile ? myProjectProfileManager : InspectionProfileManager.getInstance(); selectedProfile.setLocal(!myShareProfile); if (selectedProfile.getProfileManager() != profileManager) { if (selectedProfile.getProfileManager().getProfile(selectedProfile.getName(), false) != null) { selectedProfile.getProfileManager().deleteProfile(selectedProfile.getName()); } copyUsedSeveritiesIfUndefined(selectedProfile, profileManager); selectedProfile.setProfileManager(profileManager); } final InspectionProfile parentProfile = selectedProfile.getParentProfile(); if (((InspectionProfileManagerImpl)InspectionProfileManager.getInstance()).getSchemesManager().isShared(selectedProfile)) { if (descriptorsAreChanged()) { throw new ConfigurationException("Shared profile cannot be modified. 
Please do \"Save As...\" first."); } } try { selectedProfile.commit(); } catch (IOException e) { throw new ConfigurationException(e.getMessage()); } setSelectedProfile(parentProfile.getModifiableModel()); setSelectedProfileModified(false); myModified = false; } private static void copyUsedSeveritiesIfUndefined(final ModifiableModel selectedProfile, final ProfileManager profileManager) { final SeverityRegistrar registrar = ((SeverityProvider)profileManager).getSeverityRegistrar(); final Set<HighlightSeverity> severities = ((InspectionProfileImpl)selectedProfile).getUsedSeverities(); for (Iterator<HighlightSeverity> iterator = severities.iterator(); iterator.hasNext();) { HighlightSeverity severity = iterator.next(); if (registrar.isSeverityValid(severity.toString())) { iterator.remove(); } } if (!severities.isEmpty()) { final SeverityRegistrar oppositeRegister = ((SeverityProvider)selectedProfile.getProfileManager()).getSeverityRegistrar(); for (HighlightSeverity severity : severities) { final TextAttributesKey attributesKey = TextAttributesKey.find(severity.toString()); final TextAttributes textAttributes = oppositeRegister.getTextAttributesBySeverity(severity); LOG.assertTrue(textAttributes != null); HighlightInfoType.HighlightInfoTypeImpl info = new HighlightInfoType.HighlightInfoTypeImpl(severity, attributesKey); registrar.registerSeverity(new SeverityRegistrar.SeverityBasedTextAttributes(textAttributes.clone(), info), textAttributes.getErrorStripeColor()); } } } private boolean descriptorsAreChanged() { for (Map.Entry<Descriptor, List<Descriptor>> entry : myDescriptors.entrySet()) { Descriptor desc = entry.getKey(); Project project = myProjectProfileManager.getProject(); if (mySelectedProfile.isToolEnabled(desc.getKey(), (NamedScope)null, project) != desc.isEnabled()){ return true; } if (mySelectedProfile.getErrorLevel(desc.getKey(), desc.getScope(), project) != desc.getLevel()) { return true; } final List<Descriptor> descriptors = entry.getValue(); for 
(Descriptor descriptor : descriptors) { if (mySelectedProfile.isToolEnabled(descriptor.getKey(), descriptor.getScope(), project) != descriptor.isEnabled()) { return true; } if (mySelectedProfile.getErrorLevel(descriptor.getKey(), descriptor.getScope(), project) != descriptor.getLevel()) { return true; } } final List<ScopeToolState> tools = mySelectedProfile.getNonDefaultTools(desc.getKey().toString(), project); if (tools.size() != descriptors.size()) { return true; } for (int i = 0; i < tools.size(); i++) { final ScopeToolState pair = tools.get(i); if (!Comparing.equal(pair.getScope(project), descriptors.get(i).getScope())) { return true; } } } return false; } public Tree getTree() { return myTree; } public boolean isProfileShared() { return myShareProfile; } public void setProfileShared(boolean profileShared) { myShareProfile = profileShared; } @Override public void setVisible(boolean aFlag) { if (aFlag && myInspectionProfilePanel == null) { initUI(); } super.setVisible(aFlag); } private void setNewHighlightingLevel(@NotNull HighlightDisplayLevel level) { final int[] rows = myTree.getSelectionRows(); final boolean showOptionsAndDescriptorPanels = rows != null && rows.length == 1; for (int i = 0; rows != null && i < rows.length; i++) { final InspectionConfigTreeNode node = (InspectionConfigTreeNode)myTree.getPathForRow(rows[i]).getLastPathComponent(); final InspectionConfigTreeNode parent = (InspectionConfigTreeNode)node.getParent(); final Object userObject = node.getUserObject(); if (userObject instanceof Descriptor && (node.getScopeName() != null || node.isLeaf())) { updateErrorLevel(node, showOptionsAndDescriptorPanels, level); updateUpHierarchy(node, parent); } else { updateErrorLevelUpInHierarchy(level, showOptionsAndDescriptorPanels, node); updateUpHierarchy(node, parent); } } if (rows != null && rows.length == 1) { updateOptionsAndDescriptionPanel(myTree.getPathForRow(rows[0])); } else { initOptionsAndDescriptionPanel(); } repaintTableData(); } private void 
updateErrorLevelUpInHierarchy(@NotNull HighlightDisplayLevel level, boolean showOptionsAndDescriptorPanels, InspectionConfigTreeNode node) { node.dropCache(); for (int j = 0; j < node.getChildCount(); j++) { final InspectionConfigTreeNode child = (InspectionConfigTreeNode)node.getChildAt(j); final Object userObject = child.getUserObject(); if (userObject instanceof Descriptor && (child.getScopeName() != null || child.isLeaf())) { updateErrorLevel(child, showOptionsAndDescriptorPanels, level); } else { updateErrorLevelUpInHierarchy(level, showOptionsAndDescriptorPanels, child); } } } private void updateErrorLevel(final InspectionConfigTreeNode child, final boolean showOptionsAndDescriptorPanels, @NotNull HighlightDisplayLevel level) { final HighlightDisplayKey key = child.getDescriptor().getKey(); mySelectedProfile.setErrorLevel(key, level, child.isInspectionNode() || child.isByDefault() ? -1 : child.getParent().getIndex(child), myProjectProfileManager.getProject()); child.dropCache(); if (showOptionsAndDescriptorPanels) { updateOptionsAndDescriptionPanel(new TreePath(child.getPath())); } } private class MyFilterComponent extends FilterComponent { private MyFilterComponent() { super(INSPECTION_FILTER_HISTORY, 10); setHistory(Arrays.asList("\"New in 12\"")); } @Override public void filter() { filterTree(getFilter()); } @Override protected void onlineFilter() { if (mySelectedProfile == null) return; final String filter = getFilter(); getExpandedNodes(mySelectedProfile).saveVisibleState(myTree); fillTreeData(filter, true); reloadModel(); if (filter == null || filter.isEmpty()) { restoreTreeState(); } else { TreeUtil.expandAll(myTree); } } } private class MyAddScopeAction extends AddScopeAction { public MyAddScopeAction() { super(SingleInspectionProfilePanel.this.myTree); } @Override protected InspectionProfileImpl getSelectedProfile() { return mySelectedProfile; } @Override public void actionPerformed(AnActionEvent e) { super.actionPerformed(e); final TreePath[] paths 
= myTree.getSelectionPaths(); if (paths != null && paths.length == 1) { updateOptionsAndDescriptionPanel(myTree.getSelectionPath()); } else { initOptionsAndDescriptionPanel(); } } } private class MyDeleteScopeAction extends DeleteScopeAction { public MyDeleteScopeAction() { super(SingleInspectionProfilePanel.this.myTree); } @Override protected InspectionProfileImpl getSelectedProfile() { return mySelectedProfile; } } }
"New in 13" (IDEA-110065)
platform/lang-impl/src/com/intellij/profile/codeInspection/ui/SingleInspectionProfilePanel.java
"New in 13" (IDEA-110065)
Java
apache-2.0
d16b2545bcbb78dc58e01b62f7c0166c0b9c8b53
0
skekre98/apex-mlhr,ilganeli/incubator-apex-malhar,sandeep-n/incubator-apex-malhar,brightchen/apex-malhar,apache/incubator-apex-malhar,vrozov/apex-malhar,tweise/incubator-apex-malhar,ilganeli/incubator-apex-malhar,tweise/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,brightchen/apex-malhar,patilvikram/apex-malhar,ananthc/apex-malhar,siyuanh/apex-malhar,yogidevendra/incubator-apex-malhar,apache/incubator-apex-malhar,chandnisingh/apex-malhar,PramodSSImmaneni/apex-malhar,ananthc/apex-malhar,patilvikram/apex-malhar,yogidevendra/apex-malhar,trusli/apex-malhar,ananthc/apex-malhar,tushargosavi/incubator-apex-malhar,skekre98/apex-mlhr,patilvikram/apex-malhar,tweise/incubator-apex-malhar,chinmaykolhatkar/incubator-apex-malhar,chandnisingh/apex-malhar,prasannapramod/apex-malhar,PramodSSImmaneni/apex-malhar,siyuanh/incubator-apex-malhar,patilvikram/apex-malhar,vrozov/apex-malhar,siyuanh/apex-malhar,tushargosavi/incubator-apex-malhar,ananthc/apex-malhar,ilganeli/incubator-apex-malhar,tweise/apex-malhar,chinmaykolhatkar/incubator-apex-malhar,chinmaykolhatkar/incubator-apex-malhar,tweise/incubator-apex-malhar,prasannapramod/apex-malhar,tweise/apex-malhar,vrozov/apex-malhar,yogidevendra/incubator-apex-malhar,siyuanh/apex-malhar,vrozov/apex-malhar,skekre98/apex-mlhr,vrozov/incubator-apex-malhar,yogidevendra/incubator-apex-malhar,sandeep-n/incubator-apex-malhar,siyuanh/apex-malhar,vrozov/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,prasannapramod/apex-malhar,brightchen/apex-malhar,trusli/apex-malhar,siyuanh/incubator-apex-malhar,PramodSSImmaneni/apex-malhar,chandnisingh/apex-malhar,davidyan74/apex-malhar,trusli/apex-malhar,yogidevendra/apex-malhar,tweise/incubator-apex-malhar,apache/incubator-apex-malhar,sandeep-n/incubator-apex-malhar,chinmaykolhatkar/apex-malhar,PramodSSImmaneni/incubator-apex-malhar,apache/incubator-apex-malhar,brightchen/apex-malhar,ananthc/apex-malhar,siyuanh/incubator-apex-malhar,vrozov/incubator-apex-malhar,yogidevendra/incubator-apex-mal
har,skekre98/apex-mlhr,siyuanh/incubator-apex-malhar,trusli/apex-malhar,apache/incubator-apex-malhar,davidyan74/apex-malhar,chinmaykolhatkar/apex-malhar,prasannapramod/apex-malhar,brightchen/apex-malhar,prasannapramod/apex-malhar,yogidevendra/apex-malhar,ananthc/apex-malhar,siyuanh/apex-malhar,vrozov/apex-malhar,yogidevendra/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,davidyan74/apex-malhar,DataTorrent/incubator-apex-malhar,vrozov/incubator-apex-malhar,siyuanh/apex-malhar,chandnisingh/apex-malhar,PramodSSImmaneni/apex-malhar,vrozov/incubator-apex-malhar,trusli/apex-malhar,sandeep-n/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,chinmaykolhatkar/apex-malhar,ilganeli/incubator-apex-malhar,PramodSSImmaneni/apex-malhar,apache/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,ilganeli/incubator-apex-malhar,patilvikram/apex-malhar,sandeep-n/incubator-apex-malhar,patilvikram/apex-malhar,DataTorrent/incubator-apex-malhar,chinmaykolhatkar/incubator-apex-malhar,PramodSSImmaneni/incubator-apex-malhar,tweise/incubator-apex-malhar,chinmaykolhatkar/incubator-apex-malhar,vrozov/apex-malhar,DataTorrent/incubator-apex-malhar,trusli/apex-malhar,skekre98/apex-mlhr,PramodSSImmaneni/incubator-apex-malhar,vrozov/incubator-apex-malhar,ilganeli/incubator-apex-malhar,siyuanh/incubator-apex-malhar,chandnisingh/apex-malhar,tweise/apex-malhar,siyuanh/incubator-apex-malhar,yogidevendra/apex-malhar,DataTorrent/Megh,brightchen/apex-malhar,patilvikram/apex-malhar,ilganeli/incubator-apex-malhar,davidyan74/apex-malhar,brightchen/apex-malhar,sandeep-n/incubator-apex-malhar,chinmaykolhatkar/apex-malhar,PramodSSImmaneni/incubator-apex-malhar,yogidevendra/apex-malhar,prasannapramod/apex-malhar,PramodSSImmaneni/apex-malhar,skekre98/apex-mlhr,trusli/apex-malhar,chinmaykolhatkar/apex-malhar,siyuanh/apex-malhar,chandnisingh/apex-malhar,PramodSSImmaneni/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,chinmaykolhatkar/apex-malhar,siyuanh/i
ncubator-apex-malhar,chinmaykolhatkar/incubator-apex-malhar,apache/incubator-apex-malhar,tushargosavi/incubator-apex-malhar,DataTorrent/Megh,tweise/apex-malhar,PramodSSImmaneni/incubator-apex-malhar,davidyan74/apex-malhar,chinmaykolhatkar/apex-malhar,PramodSSImmaneni/incubator-apex-malhar,tweise/incubator-apex-malhar,davidyan74/apex-malhar,tweise/apex-malhar,vrozov/incubator-apex-malhar,tweise/apex-malhar,PramodSSImmaneni/apex-malhar,yogidevendra/apex-malhar,chandnisingh/apex-malhar,chinmaykolhatkar/incubator-apex-malhar,DataTorrent/incubator-apex-malhar,yogidevendra/incubator-apex-malhar,yogidevendra/incubator-apex-malhar,sandeep-n/incubator-apex-malhar
/* * Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datatorrent.lib.io.fs; import com.datatorrent.api.Context.CountersAggregator; import com.datatorrent.api.Context.OperatorContext; import com.datatorrent.api.DefaultPartition; import com.datatorrent.api.InputOperator; import com.datatorrent.api.Partitioner; import com.datatorrent.api.StatsListener; import com.datatorrent.lib.counters.BasicCounters; import com.esotericsoftware.kryo.Kryo; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.Serializable; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.validation.constraints.NotNull; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.mutable.MutableLong; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Input operator that reads files from a directory. * <p/> * Derived class defines how to read entries from the input stream and emit to the port. * <p/> * The directory scanning logic is pluggable to support custom directory layouts and naming schemes. The default * implementation scans a single directory. 
* <p/> * Fault tolerant by tracking previously read files and current offset as part of checkpoint state. In case of failure * the operator will skip files that were already processed and fast forward to the offset of the current file. * <p/> * Supports partitioning and dynamic changes to number of partitions through property {@link #partitionCount}. The * directory scanner is responsible to only accept the files that belong to a partition. * <p/> * This class supports retrying of failed files by putting them into failed list, and retrying them after pending * files are processed. Retrying is disabled when maxRetryCount is set to zero. * @param <T> The type of the object that this input operator reads. * @since 1.0.2 */ public abstract class AbstractFSDirectoryInputOperator<T> implements InputOperator, Partitioner<AbstractFSDirectoryInputOperator<T>>, StatsListener { private static final Logger LOG = LoggerFactory.getLogger(AbstractFSDirectoryInputOperator.class); @NotNull protected String directory; @NotNull protected DirectoryScanner scanner = new DirectoryScanner(); protected int scanIntervalMillis = 5000; protected int offset; protected String currentFile; protected Set<String> processedFiles = new HashSet<String>(); protected int emitBatchSize = 1000; protected int currentPartitions = 1 ; protected int partitionCount = 1; private int retryCount = 0; private int maxRetryCount = 5; transient protected int skipCount = 0; private transient OperatorContext context; private BasicCounters<MutableLong> fileCounters = new BasicCounters<MutableLong>(MutableLong.class); protected MutableLong globalNumberOfFailures = new MutableLong(); protected MutableLong localNumberOfFailures = new MutableLong(); protected MutableLong globalNumberOfRetries = new MutableLong(); protected MutableLong localNumberOfRetries = new MutableLong(); private transient MutableLong globalProcessedFileCount = new MutableLong(); private transient MutableLong localProcessedFileCount = new 
MutableLong(); private transient MutableLong pendingFileCount = new MutableLong(); /** * Class representing failed file, When read fails on a file in middle, then the file is * added to failedList along with last read offset. * The files from failedList will be processed after all pendingFiles are processed, but * before checking for new files. * failed file is retried for maxRetryCount number of times, after that the file is * ignored. */ protected static class FailedFile { String path; int offset; int retryCount; long lastFailedTime; /* For kryo serialization */ protected FailedFile() {} protected FailedFile(String path, int offset) { this.path = path; this.offset = offset; this.retryCount = 0; } protected FailedFile(String path, int offset, int retryCount) { this.path = path; this.offset = offset; this.retryCount = retryCount; } @Override public String toString() { return "FailedFile[" + "path='" + path + '\'' + ", offset=" + offset + ", retryCount=" + retryCount + ", lastFailedTime=" + lastFailedTime + ']'; } } /** * Enums for aggregated counters about file processing. * <p/> * Contains the enums representing number of files processed, number of * pending files, number of file errors, and number of retries. * <p/> * @since 1.0.4 */ public static enum AggregatedFileCounters { /** * The number of files processed by the logical operator up until this. * point in time */ PROCESSED_FILES, /** * The number of files waiting to be processed by the logical operator. */ PENDING_FILES, /** * The number of IO errors encountered by the logical operator. */ NUMBER_OF_ERRORS, /** * The number of times the logical operator tried to resume reading a file * on which it encountered an error. */ NUMBER_OF_RETRIES } /** * The enums used to track statistics about the * AbstractFSDirectoryInputOperator. */ protected static enum FileCounters { /** * The number of files that were in the processed list up to the last * repartition of the operator. 
*/ GLOBAL_PROCESSED_FILES, /** * The number of files added to the processed list by the physical operator * since the last repartition. */ LOCAL_PROCESSED_FILES, /** * The number of io errors encountered up to the last repartition of the * operator. */ GLOBAL_NUMBER_OF_FAILURES, /** * The number of failures encountered by the physical operator since the * last repartition. */ LOCAL_NUMBER_OF_FAILURES, /** * The number of retries encountered by the physical operator up to the last * repartition. */ GLOBAL_NUMBER_OF_RETRIES, /** * The number of retries encountered by the physical operator since the last * repartition. */ LOCAL_NUMBER_OF_RETRIES, /** * The number of files pending on the physical operator. */ PENDING_FILES } /** * A counter aggregator for AbstractFSDirectoryInputOperator. * <p/> * In order for this CountersAggregator to be used on your operator, you must * set it within your application like this. * <p/> * <code> * dag.getOperatorMeta("fsinputoperator").getAttributes().put(OperatorContext.COUNTERS_AGGREGATOR, * new AbstractFSDirectoryInputOperator.FileCountersAggregator()); * </code> * <p/> * The value of the aggregated counter can be retrieved by issuing a get * request to the host running your gateway like this. 
* <p/> * <code> * http://&lt;your host&gt;:9090/ws/v1/applications/&lt;your app id&gt;/logicalPlan/operators/&lt;operatorname&gt;/aggregation * </code> * <p/> * @since 1.0.4 */ public final static class FileCountersAggregator implements CountersAggregator, Serializable { private static final long serialVersionUID = 201409041428L; MutableLong totalLocalProcessedFiles = new MutableLong(); MutableLong pendingFiles = new MutableLong(); MutableLong totalLocalNumberOfFailures = new MutableLong(); MutableLong totalLocalNumberOfRetries = new MutableLong(); @Override @SuppressWarnings("unchecked") public Object aggregate(Collection<?> countersList) { if(countersList.isEmpty()) { return null; } BasicCounters<MutableLong> tempFileCounters = (BasicCounters<MutableLong>) countersList.iterator().next(); MutableLong globalProcessedFiles = tempFileCounters.getCounter(FileCounters.GLOBAL_PROCESSED_FILES); MutableLong globalNumberOfFailures = tempFileCounters.getCounter(FileCounters.GLOBAL_NUMBER_OF_FAILURES); MutableLong globalNumberOfRetries = tempFileCounters.getCounter(FileCounters.GLOBAL_NUMBER_OF_RETRIES); totalLocalProcessedFiles.setValue(0); pendingFiles.setValue(0); totalLocalNumberOfFailures.setValue(0); totalLocalNumberOfRetries.setValue(0); for(Object fileCounters: countersList) { BasicCounters<MutableLong> basicFileCounters = (BasicCounters<MutableLong>) fileCounters; totalLocalProcessedFiles.add(basicFileCounters.getCounter(FileCounters.LOCAL_PROCESSED_FILES)); pendingFiles.add(basicFileCounters.getCounter(FileCounters.PENDING_FILES)); totalLocalNumberOfFailures.add(basicFileCounters.getCounter(FileCounters.LOCAL_NUMBER_OF_FAILURES)); totalLocalNumberOfRetries.add(basicFileCounters.getCounter(FileCounters.LOCAL_NUMBER_OF_RETRIES)); } globalProcessedFiles.add(totalLocalProcessedFiles); globalProcessedFiles.subtract(pendingFiles); globalNumberOfFailures.add(totalLocalNumberOfFailures); globalNumberOfRetries.add(totalLocalNumberOfRetries); BasicCounters<MutableLong> 
aggregatedCounters = new BasicCounters<MutableLong>(MutableLong.class); aggregatedCounters.setCounter(AggregatedFileCounters.PROCESSED_FILES, globalProcessedFiles); aggregatedCounters.setCounter(AggregatedFileCounters.PENDING_FILES, pendingFiles); aggregatedCounters.setCounter(AggregatedFileCounters.NUMBER_OF_ERRORS, totalLocalNumberOfFailures); aggregatedCounters.setCounter(AggregatedFileCounters.NUMBER_OF_RETRIES, totalLocalNumberOfRetries); return aggregatedCounters; } } protected long lastRepartition = 0; private transient boolean emit = true; protected boolean idempotentEmit = false; /* List of unfinished files */ protected Queue<FailedFile> unfinishedFiles = new LinkedList<FailedFile>(); /* List of failed file */ protected Queue<FailedFile> failedFiles = new LinkedList<FailedFile>(); protected transient FileSystem fs; protected transient Configuration configuration; protected transient long lastScanMillis; protected transient Path filePath; protected transient InputStream inputStream; protected Set<String> pendingFiles = new LinkedHashSet<String>(); public String getDirectory() { return directory; } public void setDirectory(String directory) { this.directory = directory; } public DirectoryScanner getScanner() { return scanner; } public void setScanner(DirectoryScanner scanner) { this.scanner = scanner; } /** * Returns the frequency with which new files are scanned for in milliseconds. * @return The scan interval in milliseconds. */ public int getScanIntervalMillis() { return scanIntervalMillis; } /** * Sets the frequency with which new files are scanned for in milliseconds. * @param scanIntervalMillis The scan interval in milliseconds. */ public void setScanIntervalMillis(int scanIntervalMillis) { this.scanIntervalMillis = scanIntervalMillis; } /** * Returns the number of tuples emitted in a batch. If the operator is * idempotent then this is the number of tuples emitted in a window. * @return The number of tuples emitted in a batch. 
*/ public int getEmitBatchSize() { return emitBatchSize; } /** * Sets the number of tuples to emit in a batch. If the operator is * idempotent then this is the number of tuples emitted in a window. * @param emitBatchSize The number of tuples to emit in a batch. */ public void setEmitBatchSize(int emitBatchSize) { this.emitBatchSize = emitBatchSize; } /** * Sets whether the operator is idempotent or not. * @param idempotentEmit If this is true, then the operator */ public void setIdempotentEmit(boolean idempotentEmit) { this.idempotentEmit = idempotentEmit; } /** * * @return */ public boolean isIdempotentEmit() { return idempotentEmit; } /** * Returns the desired number of partitions. * @return the desired number of partitions. */ public int getPartitionCount() { return partitionCount; } /** * Sets the desired number of partitions. * @param requiredPartitions The desired number of partitions. */ public void setPartitionCount(int requiredPartitions) { this.partitionCount = requiredPartitions; } /** * Returns the current number of partitions for the operator. * @return The current number of partitions for the operator. 
*/ public int getCurrentPartitions() { return currentPartitions; } @Override public void setup(OperatorContext context) { globalProcessedFileCount.setValue((long) processedFiles.size()); LOG.debug("Setup processed file count: {}", globalProcessedFileCount); this.context = context; try { filePath = new Path(directory); configuration = new Configuration(); fs = FileSystem.newInstance(filePath.toUri(), configuration); if(!unfinishedFiles.isEmpty()) { retryFailedFile(unfinishedFiles.poll()); skipCount = 0; } else if(!failedFiles.isEmpty()) { retryFailedFile(failedFiles.poll()); skipCount = 0; } long startTime = System.currentTimeMillis(); LOG.info("Continue reading {} from index {} time={}", currentFile, offset, startTime); // fast forward to previous offset if(inputStream != null) { for(int index = 0; index < offset; index++) { readEntity(); } } LOG.info("Read offset={} records in setup time={}", offset, System.currentTimeMillis() - startTime); } catch (IOException ex) { failureHandling(ex); } fileCounters.setCounter(FileCounters.GLOBAL_PROCESSED_FILES, globalProcessedFileCount); fileCounters.setCounter(FileCounters.LOCAL_PROCESSED_FILES, localProcessedFileCount); fileCounters.setCounter(FileCounters.GLOBAL_NUMBER_OF_FAILURES, globalNumberOfFailures); fileCounters.setCounter(FileCounters.LOCAL_NUMBER_OF_FAILURES, localNumberOfFailures); fileCounters.setCounter(FileCounters.GLOBAL_NUMBER_OF_RETRIES, globalNumberOfRetries); fileCounters.setCounter(FileCounters.LOCAL_NUMBER_OF_RETRIES, localNumberOfRetries); fileCounters.setCounter(FileCounters.PENDING_FILES, pendingFileCount); } @Override public void teardown() { IOUtils.closeQuietly(inputStream); IOUtils.closeQuietly(fs); } @Override public void beginWindow(long windowId) { emit = true; } @Override public void endWindow() { if(context != null) { pendingFileCount.setValue(pendingFiles.size() + failedFiles.size() + unfinishedFiles.size()); if(currentFile != null) { pendingFileCount.increment(); } 
    context.setCounters(fileCounters);
  }
}

@Override
public void emitTuples()
{
  //emit will be true if the operator is not idempotent. If the operator is
  //idempotent then emit will be true the first time emitTuples is called
  //within a window and false the other times emit tuples is called within a
  //window
  if(emit) {
    if (inputStream == null) {
      try {
        // Source priority: unfinished files, then newly pending files,
        // then failed-file retries, and finally a fresh directory scan.
        if(!unfinishedFiles.isEmpty()) {
          retryFailedFile(unfinishedFiles.poll());
        } else if (!pendingFiles.isEmpty()) {
          String newPathString = pendingFiles.iterator().next();
          pendingFiles.remove(newPathString);
          this.inputStream = openFile(new Path(newPathString));
        } else if (!failedFiles.isEmpty()) {
          retryFailedFile(failedFiles.poll());
        } else {
          scanDirectory();
        }
      } catch (IOException ex) {
        failureHandling(ex);
      }
    }

    if (inputStream != null) {
      try {
        int counterForTuple = 0;
        // Emit at most emitBatchSize records per invocation.
        while (counterForTuple++ < emitBatchSize) {
          T line = readEntity();
          if (line == null) {
            LOG.info("done reading file ({} entries).", offset);
            closeFile(inputStream);
            break;
          }

          // If skipCount is non zero, then failed file recovery is going on, skipCount is
          // used to prevent already emitted records from being emitted again during recovery.
          // When failed file is open, skipCount is set to the last read offset for that file.
          //
          if (skipCount == 0) {
            offset++;
            emit(line);
          }
          else
            skipCount--;
        }
      } catch (IOException e) {
        failureHandling(e);
      }
    }
    //If the operator is idempotent, do nothing on other calls to emittuples
    //within the same window
    if(idempotentEmit) {
      emit = false;
    }
  }
}

/**
 * Scans the directory for new files.
 */
protected void scanDirectory()
{
  // Throttle listings to at most once every scanIntervalMillis.
  if(System.currentTimeMillis() - scanIntervalMillis >= lastScanMillis) {
    Set<Path> newPaths = scanner.scan(fs, filePath, processedFiles);

    for(Path newPath : newPaths) {
      String newPathString = newPath.toString();
      pendingFiles.add(newPathString);
      // Marked processed immediately so later scans skip this path.
      processedFiles.add(newPathString);
      localProcessedFileCount.increment();
    }

    lastScanMillis = System.currentTimeMillis();
  }
}

/**
 * Helper method for handling IOExceptions.
 * @param e The caught IOException.
 */
private void failureHandling(Exception e)
{
  localNumberOfFailures.increment();
  // With retries disabled, any read error is fatal to the operator.
  if(maxRetryCount <= 0) {
    throw new RuntimeException(e);
  }
  LOG.error("FS reader error", e);
  addToFailedList();
}

/**
 * Records the current file (and read offset) as failed, clears the current
 * read state, and queues the file for retry unless retries are exhausted.
 */
protected void addToFailedList()
{
  FailedFile ff = new FailedFile(currentFile, offset, retryCount);

  try {
    // try to close file
    if (this.inputStream != null)
      this.inputStream.close();
  } catch(IOException e) {
    localNumberOfFailures.increment();
    LOG.error("Could not close input stream on: " + currentFile);
  }

  ff.retryCount ++;
  ff.lastFailedTime = System.currentTimeMillis();
  ff.offset = this.offset;

  // Clear current file state.
  this.currentFile = null;
  this.inputStream = null;
  this.offset = 0;

  // Give up on the file once it has exhausted its retries.
  if (ff.retryCount > maxRetryCount)
    return;

  localNumberOfRetries.increment();
  LOG.info("adding to failed list path {} offset {} retry {}", ff.path, ff.offset, ff.retryCount);
  failedFiles.add(ff);
}

/**
 * Reopens a failed file and restores its offset/retry state; skipCount is
 * set so records emitted before the failure are skipped on re-read.
 * @param ff The failed-file record to resume.
 * @return The reopened input stream.
 * @throws IOException if the file cannot be opened.
 */
protected InputStream retryFailedFile(FailedFile ff)  throws IOException
{
  LOG.info("retrying failed file {} offset {} retry {}", ff.path, ff.offset, ff.retryCount);
  String path = ff.path;
  this.inputStream = openFile(new Path(path));
  this.offset = ff.offset;
  this.retryCount = ff.retryCount;
  this.skipCount = ff.offset;
  return this.inputStream;
}

/**
 * Opens the given file and resets the per-file read state.
 * @param path The file to open.
 * @return The opened input stream.
 * @throws IOException if the file cannot be opened.
 */
protected InputStream openFile(Path path) throws IOException
{
  LOG.info("opening file {}", path);
  InputStream input = fs.open(path);
  currentFile = path.toString();
  offset = 0;
  retryCount = 0;
  skipCount = 0;
  return input;
}

/**
 * Closes the given stream and clears the current-file read state.
 */
protected void closeFile(InputStream is) throws IOException
{
  LOG.info("closing file {} offset {}", currentFile, offset);

  if (is != null)
    is.close();

  currentFile = null;
  inputStream = null;
}

@Override
public Collection<Partition<AbstractFSDirectoryInputOperator<T>>> definePartitions(Collection<Partition<AbstractFSDirectoryInputOperator<T>>> partitions, int incrementalCapacity)
{
  lastRepartition = System.currentTimeMillis();

  int totalCount = computedNewPartitionCount(partitions, incrementalCapacity);
LOG.debug("Computed new partitions: {}", totalCount); if (totalCount == partitions.size()) { return partitions; } AbstractFSDirectoryInputOperator<T> tempOperator = partitions.iterator().next().getPartitionedInstance(); MutableLong tempGlobalNumberOfRetries = tempOperator.globalNumberOfRetries; MutableLong tempGlobalNumberOfFailures = tempOperator.globalNumberOfRetries; /* * Build collective state from all instances of the operator. */ Set<String> totalProcessedFiles = Sets.newHashSet(); Set<FailedFile> currentFiles = Sets.newHashSet(); List<DirectoryScanner> oldscanners = Lists.newLinkedList(); List<FailedFile> totalFailedFiles = Lists.newLinkedList(); List<String> totalPendingFiles = Lists.newLinkedList(); for(Partition<AbstractFSDirectoryInputOperator<T>> partition : partitions) { AbstractFSDirectoryInputOperator<T> oper = partition.getPartitionedInstance(); totalProcessedFiles.addAll(oper.processedFiles); totalFailedFiles.addAll(oper.failedFiles); totalPendingFiles.addAll(oper.pendingFiles); currentFiles.addAll(unfinishedFiles); tempGlobalNumberOfRetries.add(oper.localNumberOfRetries); tempGlobalNumberOfFailures.add(oper.localNumberOfFailures); if (oper.currentFile != null) currentFiles.add(new FailedFile(oper.currentFile, oper.offset)); oldscanners.add(oper.getScanner()); } /* * Create partitions of scanners, scanner's partition method will do state * transfer for DirectoryScanner objects. */ List<DirectoryScanner> scanners = scanner.partition(totalCount, oldscanners); Kryo kryo = new Kryo(); Collection<Partition<AbstractFSDirectoryInputOperator<T>>> newPartitions = Lists.newArrayListWithExpectedSize(totalCount); for (int i=0; i<scanners.size(); i++) { AbstractFSDirectoryInputOperator<T> oper = kryo.copy(this); DirectoryScanner scn = scanners.get(i); oper.setScanner(scn); // Do state transfer for processed files. 
oper.processedFiles.addAll(totalProcessedFiles); oper.globalNumberOfFailures = tempGlobalNumberOfRetries; oper.localNumberOfFailures.setValue(0); oper.globalNumberOfRetries = tempGlobalNumberOfFailures; oper.localNumberOfRetries.setValue(0); /* redistribute unfinished files properly */ oper.unfinishedFiles.clear(); oper.currentFile = null; oper.offset = 0; Iterator<FailedFile> unfinishedIter = currentFiles.iterator(); while(unfinishedIter.hasNext()) { FailedFile unfinishedFile = unfinishedIter.next(); if (scn.acceptFile(unfinishedFile.path)) { oper.unfinishedFiles.add(unfinishedFile); unfinishedIter.remove(); } } /* transfer failed files */ oper.failedFiles.clear(); Iterator<FailedFile> iter = totalFailedFiles.iterator(); while (iter.hasNext()) { FailedFile ff = iter.next(); if (scn.acceptFile(ff.path)) { oper.failedFiles.add(ff); iter.remove(); } } /* redistribute pending files properly */ oper.pendingFiles.clear(); Iterator<String> pendingFilesIterator = totalPendingFiles.iterator(); while(pendingFilesIterator.hasNext()) { String pathString = pendingFilesIterator.next(); if(scn.acceptFile(pathString)) { oper.pendingFiles.add(pathString); pendingFilesIterator.remove(); } } newPartitions.add(new DefaultPartition<AbstractFSDirectoryInputOperator<T>>(oper)); } LOG.info("definePartitions called returning {} partitions", newPartitions.size()); return newPartitions; } protected int computedNewPartitionCount(Collection<Partition<AbstractFSDirectoryInputOperator<T>>> partitions, int incrementalCapacity) { boolean isInitialParitition = partitions.iterator().next().getStats() == null; if (isInitialParitition && partitionCount == 1) { partitionCount = currentPartitions = partitions.size() + incrementalCapacity; } else { incrementalCapacity = partitionCount - currentPartitions; } int totalCount = partitions.size() + incrementalCapacity; LOG.info("definePartitions trying to create {} partitions, current {} required {}", totalCount, partitionCount, currentPartitions); return 
totalCount; } @Override public void partitioned(Map<Integer, Partition<AbstractFSDirectoryInputOperator<T>>> partitions) { currentPartitions = partitions.size(); } /** * Read the next item from the stream. Depending on the type of stream, this could be a byte array, line or object. * Upon return of null, the stream will be considered fully consumed. * @throws IOException * @return Depending on the type of stream an object is returned. When null is returned the stream is consumed. */ abstract protected T readEntity() throws IOException; /** * Emit the tuple on the port * @param tuple */ abstract protected void emit(T tuple); /** * Repartition is required when number of partitions are not equal to required * partitions. * @param batchedOperatorStats the stats to use when repartitioning. * @return Returns the stats listener response. */ @Override public Response processStats(BatchedOperatorStats batchedOperatorStats) { Response res = new Response(); res.repartitionRequired = false; if (currentPartitions != partitionCount) { LOG.info("processStats: trying repartition of input operator current {} required {}", currentPartitions, partitionCount); res.repartitionRequired = true; } return res; } /** * Returns the maximum number of times the operator will attempt to process * a file on which it encounters an error. * @return The maximum number of times the operator will attempt to process a * file on which it encounters an error. */ public int getMaxRetryCount() { return maxRetryCount; } /** * Sets the maximum number of times the operator will attempt to process * a file on which it encounters an error. * @param maxRetryCount The maximum number of times the operator will attempt * to process a file on which it encounters an error. */ public void setMaxRetryCount(int maxRetryCount) { this.maxRetryCount = maxRetryCount; } /** * The class that is used to scan for new files in the directory for the * AbstractFSDirectoryInputOperator. 
 */
public static class DirectoryScanner implements Serializable
{
  private static final long serialVersionUID = 4535844463258899929L;
  // Regex a file path must match to be accepted; null accepts everything.
  private String filePatternRegexp;
  // Lazily compiled form of filePatternRegexp; transient so it is rebuilt
  // after deserialization.
  private transient Pattern regex = null;
  // This scanner only accepts files whose path hash maps to partitionIndex
  // modulo partitionCount (see acceptFile).
  private int partitionIndex;
  private int partitionCount;
  // Paths rejected by the pattern, cached so repeated scans skip them.
  private final transient HashSet<String> ignoredFiles = new HashSet<String>();

  public String getFilePatternRegexp()
  {
    return filePatternRegexp;
  }

  public void setFilePatternRegexp(String filePatternRegexp)
  {
    this.filePatternRegexp = filePatternRegexp;
    // Invalidate the compiled pattern so it is rebuilt from the new regexp.
    this.regex = null;
  }

  public Pattern getRegex()
  {
    if (this.regex == null && this.filePatternRegexp != null)
      this.regex = Pattern.compile(this.filePatternRegexp);
    return this.regex;
  }

  public int getPartitionCount()
  {
    return partitionCount;
  }

  public int getPartitionIndex()
  {
    return partitionIndex;
  }

  /**
   * Lists filePath and returns new files accepted by this scanner, skipping
   * files already consumed or previously ignored.
   */
  public LinkedHashSet<Path> scan(FileSystem fs, Path filePath, Set<String> consumedFiles)
  {
    if (filePatternRegexp != null && this.regex == null) {
      this.regex = Pattern.compile(this.filePatternRegexp);
    }

    LinkedHashSet<Path> pathSet = Sets.newLinkedHashSet();
    try {
      LOG.debug("Scanning {} with pattern {}", filePath, this.filePatternRegexp);

      FileStatus[] files = fs.listStatus(filePath);
      for (FileStatus status : files)
      {
        Path path = status.getPath();
        String filePathStr = path.toString();

        if (consumedFiles.contains(filePathStr)) {
          continue;
        }

        if (ignoredFiles.contains(filePathStr)) {
          continue;
        }

        if (acceptFile(filePathStr)) {
          LOG.debug("Found {}", filePathStr);
          pathSet.add(path);
        } else {
          // don't look at it again
          ignoredFiles.add(filePathStr);
        }
      }
    } catch (FileNotFoundException e) {
      // Directory may not exist yet; treat as no files found.
      LOG.warn("Failed to list directory {}", filePath, e);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }

    return pathSet;
  }

  protected boolean acceptFile(String filePathStr)
  {
    if (partitionCount > 1) {
      // Hash-partition files across scanner instances; normalize the
      // modulus because hashCode may be negative.
      int i = filePathStr.hashCode();
      int mod = i % partitionCount;
      if (mod < 0) {
        mod += partitionCount;
      }
      LOG.debug("partition {} {} {} {}", partitionIndex, filePathStr, i, mod);

      if (mod != partitionIndex) {
        return false;
      }
    }
    if (filePatternRegexp != null && this.regex == null) {
      regex = Pattern.compile(this.filePatternRegexp);
    }
    if (regex != null) {
      Matcher matcher = regex.matcher(filePathStr);
      if (!matcher.matches()) {
        return false;
      }
    }
    return true;
  }

  public List<DirectoryScanner> partition(int count)
  {
    ArrayList<DirectoryScanner> partitions = Lists.newArrayListWithExpectedSize(count);
    for (int i=0; i<count; i++) {
      partitions.add(this.createPartition(i, count));
    }
    return partitions;
  }

  public List<DirectoryScanner> partition(int count , Collection<DirectoryScanner> scanners)
  {
    // NOTE(review): the supplied scanners are ignored here; partitioning is
    // derived solely from this instance's configuration.
    return partition(count);
  }

  protected DirectoryScanner createPartition(int partitionIndex, int partitionCount)
  {
    DirectoryScanner that = new DirectoryScanner();
    that.filePatternRegexp = this.filePatternRegexp;
    that.regex = this.regex;
    that.partitionIndex = partitionIndex;
    that.partitionCount = partitionCount;
    return that;
  }

  @Override
  public String toString()
  {
    return "DirectoryScanner [filePatternRegexp=" + filePatternRegexp + " partitionIndex=" + partitionIndex + " partitionCount=" + partitionCount + "]";
  }
}
}
library/src/main/java/com/datatorrent/lib/io/fs/AbstractFSDirectoryInputOperator.java
/* * Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datatorrent.lib.io.fs; import com.datatorrent.api.Context.CountersAggregator; import com.datatorrent.api.Context.OperatorContext; import com.datatorrent.api.DefaultPartition; import com.datatorrent.api.InputOperator; import com.datatorrent.api.Partitioner; import com.datatorrent.api.StatsListener; import com.datatorrent.lib.counters.BasicCounters; import com.esotericsoftware.kryo.Kryo; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.Serializable; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.validation.constraints.NotNull; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.mutable.MutableLong; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Input operator that reads files from a directory. * <p/> * Derived class defines how to read entries from the input stream and emit to the port. * <p/> * The directory scanning logic is pluggable to support custom directory layouts and naming schemes. The default * implementation scans a single directory. 
* <p/> * Fault tolerant by tracking previously read files and current offset as part of checkpoint state. In case of failure * the operator will skip files that were already processed and fast forward to the offset of the current file. * <p/> * Supports partitioning and dynamic changes to number of partitions through property {@link #partitionCount}. The * directory scanner is responsible to only accept the files that belong to a partition. * <p/> * This class supports retrying of failed files by putting them into failed list, and retrying them after pending * files are processed. Retrying is disabled when maxRetryCount is set to zero. * @param <T> The type of the object that this input operator reads. * @since 1.0.2 */ public abstract class AbstractFSDirectoryInputOperator<T> implements InputOperator, Partitioner<AbstractFSDirectoryInputOperator<T>>, StatsListener { private static final Logger LOG = LoggerFactory.getLogger(AbstractFSDirectoryInputOperator.class); @NotNull protected String directory; @NotNull protected DirectoryScanner scanner = new DirectoryScanner(); protected int scanIntervalMillis = 5000; protected int offset; protected String currentFile; protected Set<String> processedFiles = new HashSet<String>(); protected int emitBatchSize = 1000; protected int currentPartitions = 1 ; protected int partitionCount = 1; private int retryCount = 0; private int maxRetryCount = 5; transient protected int skipCount = 0; private transient OperatorContext context; protected long globalNumberOfFailures = 0; protected long localNumberOfFailures = 0; protected long globalNumberOfRetries = 0; protected long localNumberOfRetries = 0; private transient int globalProcessedFileCount = 0; private transient int localProcessedFileCount = 0; /** * Class representing failed file, When read fails on a file in middle, then the file is * added to failedList along with last read offset. 
* The files from failedList will be processed after all pendingFiles are processed, but * before checking for new files. * failed file is retried for maxRetryCount number of times, after that the file is * ignored. */ protected static class FailedFile { String path; int offset; int retryCount; long lastFailedTime; /* For kryo serialization */ protected FailedFile() {} protected FailedFile(String path, int offset) { this.path = path; this.offset = offset; this.retryCount = 0; } protected FailedFile(String path, int offset, int retryCount) { this.path = path; this.offset = offset; this.retryCount = retryCount; } @Override public String toString() { return "FailedFile[" + "path='" + path + '\'' + ", offset=" + offset + ", retryCount=" + retryCount + ", lastFailedTime=" + lastFailedTime + ']'; } } /** * Enums for aggregated counters about file processing. * <p/> * Contains the enums representing number of files processed, number of * pending files, number of file errors, and number of retries. * <p/> * @since 1.0.4 */ public static enum AggregatedFileCounters { /** * The number of files processed by the logical operator up until this. * point in time */ PROCESSED_FILES, /** * The number of files waiting to be processed by the logical operator. */ PENDING_FILES, /** * The number of IO errors encountered by the logical operator. */ NUMBER_OF_ERRORS, /** * The number of times the logical operator tried to resume reading a file * on which it encountered an error. */ NUMBER_OF_RETRIES } /** * The enums used to track statistics about the * AbstractFSDirectoryInputOperator. */ protected static enum FileCounters { /** * The number of files that were in the processed list up to the last * repartition of the operator. */ GLOBAL_PROCESSED_FILES, /** * The number of files added to the processed list by the physical operator * since the last repartition. */ LOCAL_PROCESSED_FILES, /** * The number of io errors encountered up to the last repartition of the * operator. 
*/ GLOBAL_NUMBER_OF_FAILURES, /** * The number of failures encountered by the physical operator since the * last repartition. */ LOCAL_NUMBER_OF_FAILURES, /** * The number of retries encountered by the physical operator up to the last * repartition. */ GLOBAL_NUMBER_OF_RETRIES, /** * The number of retries encountered by the physical operator since the last * repartition. */ LOCAL_NUMBER_OF_RETRIES, /** * The number of files pending on the physical operator. */ PENDING_FILES } /** * A counter aggregator for AbstractFSDirectoryInputOperator. * <p/> * In order for this CountersAggregator to be used on your operator, you must * set it within your application like this. * <p/> * <code> * dag.getOperatorMeta("fsinputoperator").getAttributes().put(OperatorContext.COUNTERS_AGGREGATOR, * new AbstractFSDirectoryInputOperator.FileCountersAggregator()); * </code> * <p/> * The value of the aggregated counter can be retrieved by issuing a get * request to the host running your gateway like this. * <p/> * <code> * http://&lt;your host&gt;:9090/ws/v1/applications/&lt;your app id&gt;/logicalPlan/operators/&lt;operatorname&gt;/aggregation * </code> * <p/> * @since 1.0.4 */ public final static class FileCountersAggregator implements CountersAggregator, Serializable { private static final long serialVersionUID = 201409041428L; public FileCountersAggregator() { } @Override @SuppressWarnings("unchecked") public Object aggregate(Collection<?> countersList) { if(countersList.isEmpty()) { return null; } BasicCounters<MutableLong> tempFileCounters = (BasicCounters<MutableLong>) countersList.iterator().next(); MutableLong globalProcessedFiles = tempFileCounters.getCounter(FileCounters.GLOBAL_PROCESSED_FILES); MutableLong globalNumberOfFailures = tempFileCounters.getCounter(FileCounters.GLOBAL_NUMBER_OF_FAILURES); MutableLong globalNumberOfRetries = tempFileCounters.getCounter(FileCounters.GLOBAL_NUMBER_OF_RETRIES); MutableLong totalLocalProcessedFiles = new MutableLong(0); MutableLong 
pendingFiles = new MutableLong(0); MutableLong totalLocalNumberOfFailures = new MutableLong(0); MutableLong totalLocalNumberOfRetries = new MutableLong(0); for(Object fileCounters: countersList) { BasicCounters<MutableLong> basicFileCounters = (BasicCounters<MutableLong>) fileCounters; totalLocalProcessedFiles.add(basicFileCounters.getCounter(FileCounters.LOCAL_PROCESSED_FILES)); pendingFiles.add(basicFileCounters.getCounter(FileCounters.PENDING_FILES)); totalLocalNumberOfFailures.add(basicFileCounters.getCounter(FileCounters.LOCAL_NUMBER_OF_FAILURES)); totalLocalNumberOfRetries.add(basicFileCounters.getCounter(FileCounters.LOCAL_NUMBER_OF_RETRIES)); } globalProcessedFiles.add(totalLocalProcessedFiles); globalProcessedFiles.subtract(pendingFiles); globalNumberOfFailures.add(totalLocalNumberOfFailures); globalNumberOfRetries.add(totalLocalNumberOfRetries); BasicCounters<MutableLong> aggregatedCounters = new BasicCounters<MutableLong>(MutableLong.class); aggregatedCounters.setCounter(AggregatedFileCounters.PROCESSED_FILES, globalProcessedFiles); aggregatedCounters.setCounter(AggregatedFileCounters.PENDING_FILES, pendingFiles); aggregatedCounters.setCounter(AggregatedFileCounters.NUMBER_OF_ERRORS, totalLocalNumberOfFailures); aggregatedCounters.setCounter(AggregatedFileCounters.NUMBER_OF_RETRIES, totalLocalNumberOfRetries); return aggregatedCounters; } } protected long lastRepartition = 0; private transient boolean emit = true; protected boolean idempotentEmit = false; /* List of unfinished files */ protected Queue<FailedFile> unfinishedFiles = new LinkedList<FailedFile>(); /* List of failed file */ protected Queue<FailedFile> failedFiles = new LinkedList<FailedFile>(); protected transient FileSystem fs; protected transient Configuration configuration; protected transient long lastScanMillis; protected transient Path filePath; protected transient InputStream inputStream; protected Set<String> pendingFiles = new LinkedHashSet<String>(); public String getDirectory() { 
return directory; } public void setDirectory(String directory) { this.directory = directory; } public DirectoryScanner getScanner() { return scanner; } public void setScanner(DirectoryScanner scanner) { this.scanner = scanner; } /** * Returns the frequency with which new files are scanned for in milliseconds. * @return The scan interval in milliseconds. */ public int getScanIntervalMillis() { return scanIntervalMillis; } /** * Sets the frequency with which new files are scanned for in milliseconds. * @param scanIntervalMillis The scan interval in milliseconds. */ public void setScanIntervalMillis(int scanIntervalMillis) { this.scanIntervalMillis = scanIntervalMillis; } /** * Returns the number of tuples emitted in a batch. If the operator is * idempotent then this is the number of tuples emitted in a window. * @return The number of tuples emitted in a batch. */ public int getEmitBatchSize() { return emitBatchSize; } /** * Sets the number of tuples to emit in a batch. If the operator is * idempotent then this is the number of tuples emitted in a window. * @param emitBatchSize The number of tuples to emit in a batch. */ public void setEmitBatchSize(int emitBatchSize) { this.emitBatchSize = emitBatchSize; } /** * Sets whether the operator is idempotent or not. * @param idempotentEmit If this is true, then the operator */ public void setIdempotentEmit(boolean idempotentEmit) { this.idempotentEmit = idempotentEmit; } /** * * @return */ public boolean isIdempotentEmit() { return idempotentEmit; } /** * Returns the desired number of partitions. * @return the desired number of partitions. */ public int getPartitionCount() { return partitionCount; } /** * Sets the desired number of partitions. * @param requiredPartitions The desired number of partitions. */ public void setPartitionCount(int requiredPartitions) { this.partitionCount = requiredPartitions; } /** * Returns the current number of partitions for the operator. 
* @return The current number of partitions for the operator. */ public int getCurrentPartitions() { return currentPartitions; } @Override public void setup(OperatorContext context) { globalProcessedFileCount = processedFiles.size(); LOG.debug("Setup processed file count: {}", globalProcessedFileCount); this.context = context; try { filePath = new Path(directory); configuration = new Configuration(); fs = FileSystem.newInstance(filePath.toUri(), configuration); if(!unfinishedFiles.isEmpty()) { retryFailedFile(unfinishedFiles.poll()); skipCount = 0; } else if(!failedFiles.isEmpty()) { retryFailedFile(failedFiles.poll()); skipCount = 0; } long startTime = System.currentTimeMillis(); LOG.info("Continue reading {} from index {} time={}", currentFile, offset, startTime); // fast forward to previous offset if(inputStream != null) { for(int index = 0; index < offset; index++) { readEntity(); } } LOG.info("Read offset={} records in setup time={}", offset, System.currentTimeMillis() - startTime); } catch (IOException ex) { failureHandling(ex); } } @Override public void teardown() { IOUtils.closeQuietly(inputStream); IOUtils.closeQuietly(fs); } @Override public void beginWindow(long windowId) { emit = true; } @Override public void endWindow() { if(context != null) { int pendingFileCount = pendingFiles.size() + failedFiles.size() + unfinishedFiles.size(); if(currentFile != null) { pendingFileCount++; } BasicCounters<MutableLong> fileCounters = new BasicCounters<MutableLong>(MutableLong.class); fileCounters.setCounter(FileCounters.GLOBAL_PROCESSED_FILES, new MutableLong(globalProcessedFileCount)); fileCounters.setCounter(FileCounters.LOCAL_PROCESSED_FILES, new MutableLong(localProcessedFileCount)); fileCounters.setCounter(FileCounters.GLOBAL_NUMBER_OF_FAILURES, new MutableLong(globalNumberOfFailures)); fileCounters.setCounter(FileCounters.LOCAL_NUMBER_OF_FAILURES, new MutableLong(localNumberOfFailures)); fileCounters.setCounter(FileCounters.GLOBAL_NUMBER_OF_RETRIES, new 
MutableLong(globalNumberOfRetries)); fileCounters.setCounter(FileCounters.LOCAL_NUMBER_OF_RETRIES, new MutableLong(localNumberOfRetries)); fileCounters.setCounter(FileCounters.PENDING_FILES, new MutableLong(pendingFileCount)); context.setCounters(fileCounters); } } @Override public void emitTuples() { //emit will be true if the operator is not idempotent. If the operator is //idempotent then emit will be true the first time emitTuples is called //within a window and false the other times emit tuples is called within a //window if(emit) { if (inputStream == null) { try { if(!unfinishedFiles.isEmpty()) { retryFailedFile(unfinishedFiles.poll()); } else if (!pendingFiles.isEmpty()) { String newPathString = pendingFiles.iterator().next(); pendingFiles.remove(newPathString); this.inputStream = openFile(new Path(newPathString)); } else if (!failedFiles.isEmpty()) { retryFailedFile(failedFiles.poll()); } else { scanDirectory(); } } catch (IOException ex) { failureHandling(ex); } } if (inputStream != null) { try { int counterForTuple = 0; while (counterForTuple++ < emitBatchSize) { T line = readEntity(); if (line == null) { LOG.info("done reading file ({} entries).", offset); closeFile(inputStream); break; } // If skipCount is non zero, then failed file recovery is going on, skipCount is // used to prevent already emitted records from being emitted again during recovery. // When failed file is open, skipCount is set to the last read offset for that file. // if (skipCount == 0) { offset++; emit(line); } else skipCount--; } } catch (IOException e) { failureHandling(e); } } //If the operator is idempotent, do nothing on other calls to emittuples //within the same window if(idempotentEmit) { emit = false; } } } /** * Scans the directory for new files. 
*/ protected void scanDirectory() { if(System.currentTimeMillis() - scanIntervalMillis >= lastScanMillis) { Set<Path> newPaths = scanner.scan(fs, filePath, processedFiles); for(Path newPath : newPaths) { String newPathString = newPath.toString(); pendingFiles.add(newPathString); processedFiles.add(newPathString); localProcessedFileCount++; } lastScanMillis = System.currentTimeMillis(); } } /** * Helper method for handling IOExceptions. * @param e The caught IOException. */ private void failureHandling(Exception e) { localNumberOfFailures++; if(maxRetryCount <= 0) { throw new RuntimeException(e); } LOG.error("FS reader error", e); addToFailedList(); } protected void addToFailedList() { FailedFile ff = new FailedFile(currentFile, offset, retryCount); try { // try to close file if (this.inputStream != null) this.inputStream.close(); } catch(IOException e) { localNumberOfFailures++; LOG.error("Could not close input stream on: " + currentFile); } ff.retryCount ++; ff.lastFailedTime = System.currentTimeMillis(); ff.offset = this.offset; // Clear current file state. 
this.currentFile = null; this.inputStream = null; this.offset = 0; if (ff.retryCount > maxRetryCount) return; localNumberOfRetries++; LOG.info("adding to failed list path {} offset {} retry {}", ff.path, ff.offset, ff.retryCount); failedFiles.add(ff); } protected InputStream retryFailedFile(FailedFile ff) throws IOException { LOG.info("retrying failed file {} offset {} retry {}", ff.path, ff.offset, ff.retryCount); String path = ff.path; this.inputStream = openFile(new Path(path)); this.offset = ff.offset; this.retryCount = ff.retryCount; this.skipCount = ff.offset; return this.inputStream; } protected InputStream openFile(Path path) throws IOException { LOG.info("opening file {}", path); InputStream input = fs.open(path); currentFile = path.toString(); offset = 0; retryCount = 0; skipCount = 0; return input; } protected void closeFile(InputStream is) throws IOException { LOG.info("closing file {} offset {}", currentFile, offset); if (is != null) is.close(); currentFile = null; inputStream = null; } @Override public Collection<Partition<AbstractFSDirectoryInputOperator<T>>> definePartitions(Collection<Partition<AbstractFSDirectoryInputOperator<T>>> partitions, int incrementalCapacity) { lastRepartition = System.currentTimeMillis(); int totalCount = computedNewPartitionCount(partitions, incrementalCapacity); LOG.debug("Computed new partitions: {}", totalCount); if (totalCount == partitions.size()) { return partitions; } AbstractFSDirectoryInputOperator<T> tempOperator = partitions.iterator().next().getPartitionedInstance(); long tempGlobalNumberOfRetries = tempOperator.globalNumberOfRetries; long tempGlobalNumberOfFailures = tempOperator.globalNumberOfRetries; /* * Build collective state from all instances of the operator. 
*/ Set<String> totalProcessedFiles = new HashSet<String>(); Set<FailedFile> currentFiles = new HashSet<FailedFile>(); List<DirectoryScanner> oldscanners = new LinkedList<DirectoryScanner>(); List<FailedFile> totalFailedFiles = new LinkedList<FailedFile>(); List<String> totalPendingFiles = new LinkedList<String>(); for(Partition<AbstractFSDirectoryInputOperator<T>> partition : partitions) { AbstractFSDirectoryInputOperator<T> oper = partition.getPartitionedInstance(); totalProcessedFiles.addAll(oper.processedFiles); totalFailedFiles.addAll(oper.failedFiles); totalPendingFiles.addAll(oper.pendingFiles); currentFiles.addAll(unfinishedFiles); tempGlobalNumberOfRetries += oper.localNumberOfRetries; tempGlobalNumberOfFailures += oper.localNumberOfFailures; if (oper.currentFile != null) currentFiles.add(new FailedFile(oper.currentFile, oper.offset)); oldscanners.add(oper.getScanner()); } /* * Create partitions of scanners, scanner's partition method will do state * transfer for DirectoryScanner objects. */ List<DirectoryScanner> scanners = scanner.partition(totalCount, oldscanners); Kryo kryo = new Kryo(); Collection<Partition<AbstractFSDirectoryInputOperator<T>>> newPartitions = Lists.newArrayListWithExpectedSize(totalCount); for (int i=0; i<scanners.size(); i++) { AbstractFSDirectoryInputOperator<T> oper = kryo.copy(this); DirectoryScanner scn = scanners.get(i); oper.setScanner(scn); // Do state transfer for processed files. 
oper.processedFiles.addAll(totalProcessedFiles); oper.globalNumberOfFailures = tempGlobalNumberOfRetries; oper.localNumberOfFailures = 0; oper.globalNumberOfRetries = tempGlobalNumberOfFailures; oper.localNumberOfRetries = 0; /* redistribute unfinished files properly */ oper.unfinishedFiles.clear(); oper.currentFile = null; oper.offset = 0; Iterator<FailedFile> unfinishedIter = currentFiles.iterator(); while(unfinishedIter.hasNext()) { FailedFile unfinishedFile = unfinishedIter.next(); if (scn.acceptFile(unfinishedFile.path)) { oper.unfinishedFiles.add(unfinishedFile); unfinishedIter.remove(); } } /* transfer failed files */ oper.failedFiles.clear(); Iterator<FailedFile> iter = totalFailedFiles.iterator(); while (iter.hasNext()) { FailedFile ff = iter.next(); if (scn.acceptFile(ff.path)) { oper.failedFiles.add(ff); iter.remove(); } } /* redistribute pending files properly */ oper.pendingFiles.clear(); Iterator<String> pendingFilesIterator = totalPendingFiles.iterator(); while(pendingFilesIterator.hasNext()) { String pathString = pendingFilesIterator.next(); if(scn.acceptFile(pathString)) { oper.pendingFiles.add(pathString); pendingFilesIterator.remove(); } } newPartitions.add(new DefaultPartition<AbstractFSDirectoryInputOperator<T>>(oper)); } LOG.info("definePartitions called returning {} partitions", newPartitions.size()); return newPartitions; } protected int computedNewPartitionCount(Collection<Partition<AbstractFSDirectoryInputOperator<T>>> partitions, int incrementalCapacity) { boolean isInitialParitition = partitions.iterator().next().getStats() == null; if (isInitialParitition && partitionCount == 1) { partitionCount = currentPartitions = partitions.size() + incrementalCapacity; } else { incrementalCapacity = partitionCount - currentPartitions; } int totalCount = partitions.size() + incrementalCapacity; LOG.info("definePartitions trying to create {} partitions, current {} required {}", totalCount, partitionCount, currentPartitions); return totalCount; } 
@Override
public void partitioned(Map<Integer, Partition<AbstractFSDirectoryInputOperator<T>>> partitions)
{
  // Remember how many partitions actually exist; processStats compares this
  // against the requested partitionCount to decide whether to repartition.
  currentPartitions = partitions.size();
}

/**
 * Read the next item from the stream. Depending on the type of stream, this could be a byte array, line or object.
 * Upon return of null, the stream will be considered fully consumed.
 * @throws IOException
 * @return Depending on the type of stream an object is returned. When null is returned the stream is consumed.
 */
abstract protected T readEntity() throws IOException;

/**
 * Emit the tuple on the port.
 * @param tuple the tuple read from the stream by {@code readEntity}
 */
abstract protected void emit(T tuple);

/**
 * Repartition is required when number of partitions are not equal to required
 * partitions.
 * @param batchedOperatorStats the stats to use when repartitioning.
 * @return Returns the stats listener response.
 */
@Override
public Response processStats(BatchedOperatorStats batchedOperatorStats)
{
  Response res = new Response();
  res.repartitionRequired = false;
  if (currentPartitions != partitionCount) {
    LOG.info("processStats: trying repartition of input operator current {} required {}", currentPartitions, partitionCount);
    res.repartitionRequired = true;
  }
  return res;
}

/**
 * Returns the maximum number of times the operator will attempt to process
 * a file on which it encounters an error.
 * @return The maximum number of times the operator will attempt to process a
 * file on which it encounters an error.
 */
public int getMaxRetryCount()
{
  return maxRetryCount;
}

/**
 * Sets the maximum number of times the operator will attempt to process
 * a file on which it encounters an error.
 * @param maxRetryCount The maximum number of times the operator will attempt
 * to process a file on which it encounters an error.
 */
public void setMaxRetryCount(int maxRetryCount)
{
  this.maxRetryCount = maxRetryCount;
}

/**
 * The class that is used to scan for new files in the directory for the
 * AbstractFSDirectoryInputOperator.
*/
public static class DirectoryScanner implements Serializable
{
  private static final long serialVersionUID = 4535844463258899929L;

  /** Optional regular expression; when set, only matching paths are accepted. */
  private String filePatternRegexp;
  private transient Pattern regex = null;
  private int partitionIndex;
  private int partitionCount;
  /** Paths already rejected once; cached so they are never examined again. */
  private final transient HashSet<String> ignoredFiles = new HashSet<String>();

  public String getFilePatternRegexp()
  {
    return filePatternRegexp;
  }

  public void setFilePatternRegexp(String filePatternRegexp)
  {
    this.filePatternRegexp = filePatternRegexp;
    // Drop the compiled form so the next use recompiles against the new pattern.
    this.regex = null;
  }

  public Pattern getRegex()
  {
    if (this.regex == null && this.filePatternRegexp != null) {
      this.regex = Pattern.compile(this.filePatternRegexp);
    }
    return this.regex;
  }

  public int getPartitionCount()
  {
    return partitionCount;
  }

  public int getPartitionIndex()
  {
    return partitionIndex;
  }

  /**
   * Lists {@code filePath} and returns, in listing order, the paths that are
   * neither consumed nor previously ignored and that this scanner accepts.
   */
  public LinkedHashSet<Path> scan(FileSystem fs, Path filePath, Set<String> consumedFiles)
  {
    if (filePatternRegexp != null && this.regex == null) {
      this.regex = Pattern.compile(this.filePatternRegexp);
    }

    LinkedHashSet<Path> discovered = Sets.newLinkedHashSet();
    try {
      LOG.debug("Scanning {} with pattern {}", filePath, this.filePatternRegexp);
      for (FileStatus entry : fs.listStatus(filePath)) {
        Path candidate = entry.getPath();
        String candidateStr = candidate.toString();
        if (consumedFiles.contains(candidateStr) || ignoredFiles.contains(candidateStr)) {
          continue;
        }
        if (acceptFile(candidateStr)) {
          LOG.debug("Found {}", candidateStr);
          discovered.add(candidate);
        }
        else {
          // don't look at it again
          ignoredFiles.add(candidateStr);
        }
      }
    }
    catch (FileNotFoundException e) {
      LOG.warn("Failed to list directory {}", filePath, e);
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
    return discovered;
  }

  /**
   * A path is accepted when its hash falls into this scanner's partition bucket
   * and, if a pattern is configured, the path matches it.
   */
  protected boolean acceptFile(String filePathStr)
  {
    if (partitionCount > 1) {
      int hash = filePathStr.hashCode();
      int bucket = hash % partitionCount;
      // Java's % can be negative; normalize into [0, partitionCount).
      if (bucket < 0) {
        bucket += partitionCount;
      }
      LOG.debug("partition {} {} {} {}", partitionIndex, filePathStr, hash, bucket);
      if (bucket != partitionIndex) {
        return false;
      }
    }
    if (filePatternRegexp != null && this.regex == null) {
      regex = Pattern.compile(this.filePatternRegexp);
    }
    return regex == null || regex.matcher(filePathStr).matches();
  }

  public List<DirectoryScanner> partition(int count)
  {
    ArrayList<DirectoryScanner> slices = Lists.newArrayListWithExpectedSize(count);
    for (int index = 0; index < count; index++) {
      slices.add(this.createPartition(index, count));
    }
    return slices;
  }

  public List<DirectoryScanner> partition(int count, Collection<DirectoryScanner> scanners)
  {
    // The base scanner keeps no per-file state worth transferring, so the old
    // scanner instances are ignored and the key space is simply sliced afresh.
    return partition(count);
  }

  protected DirectoryScanner createPartition(int partitionIndex, int partitionCount)
  {
    DirectoryScanner slice = new DirectoryScanner();
    slice.filePatternRegexp = this.filePatternRegexp;
    slice.regex = this.regex;
    slice.partitionIndex = partitionIndex;
    slice.partitionCount = partitionCount;
    return slice;
  }

  @Override
  public String toString()
  {
    return "DirectoryScanner [filePatternRegexp=" + filePatternRegexp + " partitionIndex=" + partitionIndex + " partitionCount=" + partitionCount + "]";
  }
}
}
Cleaned up unnecessary object allocations.
library/src/main/java/com/datatorrent/lib/io/fs/AbstractFSDirectoryInputOperator.java
Cleaned up unnecessary object allocations.
Java
apache-2.0
568c7e813aa17679248b144883cf6faf961cecda
0
ibinti/intellij-community,retomerz/intellij-community,allotria/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,signed/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,ftomassetti/intellij-community,fengbaicanhe/intellij-community,da1z/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,wreckJ/intellij-community,akosyakov/intellij-community,izonder/intellij-community,supersven/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,kdwink/intellij-community,petteyg/intellij-community,slisson/intellij-community,SerCeMan/intellij-community,MichaelNedzelsky/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,caot/intellij-community,dslomov/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,allotria/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,ernestp/cons
ulo,suncycheng/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,samthor/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,jagguli/intellij-community,hurricup/intellij-community,hurricup/intellij-community,FHannes/intellij-community,ryano144/intellij-community,slisson/intellij-community,gnuhub/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,clumsy/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,kool79/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,retomerz/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,fengbaicanhe/intellij-community,holmes/intellij-community,ftomassetti/intellij-community,kool79/intellij-community,holmes/intellij-community,FHannes/intellij-community,vladmm/intellij-community,jagguli/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,diorcety/intellij-community,izonder/intellij-community,izonder/intellij-community,samthor/intellij-community,ernestp/consulo,vvv1559/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,SerCeMan/intellij-community,adedayo/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,mglukhikh/intellij-comm
unity,xfournet/intellij-community,semonte/intellij-community,retomerz/intellij-community,fnouama/intellij-community,kdwink/intellij-community,diorcety/intellij-community,holmes/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,ernestp/consulo,robovm/robovm-studio,kool79/intellij-community,semonte/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,holmes/intellij-community,allotria/intellij-community,signed/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,izonder/intellij-community,fnouama/intellij-community,adedayo/intellij-community,gnuhub/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,caot/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,allotria/intellij-community,diorcety/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,izonder/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,caot/intellij-community,wreckJ/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,blademainer/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,orekyuu/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,robovm/robovm-studio,lucafavatella/intellij-community,consulo/consulo,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,retomerz/intellij-community,hurricup/intellij-c
ommunity,tmpgit/intellij-community,apixandru/intellij-community,asedunov/intellij-community,allotria/intellij-community,suncycheng/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,ryano144/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,xfournet/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,robovm/robovm-studio,blademainer/intellij-community,robovm/robovm-studio,fnouama/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,holmes/intellij-community,orekyuu/intellij-community,robovm/robovm-studio,akosyakov/intellij-community,ahb0327/intellij-community,semonte/intellij-community,signed/intellij-community,blademainer/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,signed/intellij-community,jagguli/intellij-community,semonte/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,kool79/intellij-community,slisson/intellij-community,vladmm/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,ol-loginov/intellij-community,signed/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,MER-GROUP/intellij-community,ernestp/consul
o,kdwink/intellij-community,SerCeMan/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,fnouama/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,caot/intellij-community,robovm/robovm-studio,pwoodworth/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,adedayo/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,izonder/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,slisson/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,salguarnieri/intellij-community,youdonghai/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,jagguli/intellij-community,blademainer/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,hurricup/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,slisson/intellij-community,kdwink/intellij-community,diorcety/intellij-community,diorcet
y/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,suncycheng/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,blademainer/intellij-community,ThiagoGarciaAlves/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,ryano144/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,dslomov/intellij-community,vladmm/intellij-community,vladmm/intellij-community,dslomov/intellij-community,nicolargo/intellij-community,gnuhub/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,retomerz/intellij-community,ryano144/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,FHannes/intellij-community,diorcety/intellij-community,apixandru/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,wreckJ/intellij-community,signed/intellij-community,da1z/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,xfournet/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,fengbaicanhe/int
ellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,da1z/intellij-community,fitermay/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,tmpgit/intellij-community,da1z/intellij-community,consulo/consulo,mglukhikh/intellij-community,vladmm/intellij-community,hurricup/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,consulo/consulo,petteyg/intellij-community,suncycheng/intellij-community,supersven/intellij-community,signed/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,kdwink/intellij-community,ahb0327/intellij-community,amith01994/intellij-community,diorcety/intellij-community,vvv1559/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,diorcety/intellij-community,petteyg/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,ryano144/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,blademainer/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,idea4bsd/idea4bsd,orekyuu/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,robovm/robovm-studio,petteyg/intellij-community,ibinti/intellij-community,slisson/intellij-community,robovm/robovm-studio,clumsy/intellij-community,jagguli/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,vladmm/intellij-community
,fengbaicanhe/intellij-community,slisson/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,asedunov/intellij-community,izonder/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,da1z/intellij-community,TangHao1987/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,orekyuu/intellij-community,holmes/intellij-community,robovm/robovm-studio,blademainer/intellij-community,fnouama/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,ryano144/intellij-community,fnouama/intellij-community,caot/intellij-community,nicolargo/intellij-community,clumsy/intellij-community,signed/intellij-community,supersven/intellij-community,amith01994/intellij-community,orekyuu/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,holmes/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,da1z/intellij-community,orekyuu/intellij-community,semonte/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,slisson/intellij-community,diorcety/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,ol-loginov/intellij-community,petteyg/intellij-community,gnuhub/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,samthor/intellij-community,ryano144/intell
ij-community,orekyuu/intellij-community,kool79/intellij-community,retomerz/intellij-community,holmes/intellij-community,supersven/intellij-community,wreckJ/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,ibinti/intellij-community,da1z/intellij-community,blademainer/intellij-community,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,samthor/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,Lekanich/intellij-community,suncycheng/intellij-community,semonte/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,nicolargo/intellij-community,da1z/intellij-community,muntasirsyed/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,kool79/intellij-community,clumsy/intellij-community,gnuhub/intellij-community,izonder/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-community,asedunov/intellij-community,vladmm/intellij-community,caot/intellij-community,fnouama/intellij-community,ibinti/intellij-community,apixandru/intellij-community,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,slisson/intellij-community,signed/intellij-community,dslomov/intellij-community,TangHao1987/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,samthor/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,fnouama/intellij-community,xfournet/int
ellij-community,SerCeMan/intellij-community,ryano144/intellij-community,caot/intellij-community,allotria/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,ThiagoGarciaAlves/intellij-community,samthor/intellij-community,slisson/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,samthor/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,akosyakov/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,signed/intellij-community,supersven/intellij-community,suncycheng/intellij-community,allotria/intellij-community,nicolargo/intellij-community,clumsy/intellij-community,hurricup/intellij-community,consulo/consulo,TangHao1987/intellij-community,Distrotech/intellij-community,kool79/intellij-community,wreckJ/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,adedayo/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,kdwink/intellij-community,fitermay/intellij-community,caot/intellij-community,clumsy/intellij-community,gnuhub/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,retomerz/intellij-community,ibinti/intellij-community,slisson/intellij-community,kdwink/intellij-community,da1z/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,diorcety/intellij-community,asedunov/intellij-community,jagguli/intellij-community,apixandru/intellij-community,ernestp/consulo,asedunov/intellij-community,clumsy/intellij-community,clumsy/intellij-community,dslomov/intellij-community,retomerz/intellij-community,blademainer/intellij-community,ftomassetti/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,amith01994/intellij-community,holmes/intellij-community,supersven/intellij-community,clumsy/intellij-community,caot/intellij-community,salguarnieri/intellij-community,apixandru/intelli
j-community,fengbaicanhe/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,kool79/intellij-community,caot/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,ThiagoGarciaAlves/intellij-community,ernestp/consulo,kdwink/intellij-community,ibinti/intellij-community,signed/intellij-community,semonte/intellij-community,FHannes/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,FHannes/intellij-community,holmes/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,consulo/consulo,michaelgallacher/intellij-community,SerCeMan/intellij-community,ryano144/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,idea4bsd/idea4bsd,adedayo/intellij-community,signed/intellij-community,consulo/consulo,allotria/intellij-community,clumsy/intellij-community,dslomov/intellij-community,retomerz/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,ahb0327/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.impl;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.HierarchicalMethodSignatureImpl;
import com.intellij.psi.search.searches.DeepestSuperMethodsSearch;
import com.intellij.psi.search.searches.SuperMethodsSearch;
import com.intellij.psi.util.*;
import com.intellij.util.NotNullFunction;
import com.intellij.util.Processor;
import com.intellij.util.SmartList;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import gnu.trove.TObjectHashingStrategy;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * Static utilities for querying the super-method relationships of a {@link PsiMethod}.
 */
public class PsiSuperMethodImplUtil {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.PsiSuperMethodImplUtil");
  // Per-class cache: maps each method signature of the class to its
  // hierarchical signature, built by buildMethodHierarchy (includes privates,
  // starts from the empty substitutor and a fresh visited set).
  private static final PsiCacheKey<Map<MethodSignature, HierarchicalMethodSignature>, PsiClass> SIGNATURES_KEY = PsiCacheKey
    .create("SIGNATURES_KEY", new NotNullFunction<PsiClass, Map<MethodSignature, HierarchicalMethodSignature>>() {
      @NotNull
      @Override
      public Map<MethodSignature, HierarchicalMethodSignature> fun(PsiClass dom) {
        return buildMethodHierarchy(dom, PsiSubstitutor.EMPTY, true, new THashSet<PsiClass>(), false);
      }
    });

  // Utility class; never instantiated.
  private PsiSuperMethodImplUtil() {
  }

  /**
   * Finds the super methods of {@code method}, not restricted to any particular class.
   */
  @NotNull
  public static PsiMethod[] findSuperMethods(PsiMethod method) {
    return findSuperMethods(method, null);
  }
  /**
   * Finds the super methods of {@code method}.
   * @param checkAccess when true, a private method is treated as having no super methods.
   */
  @NotNull
  public static PsiMethod[] findSuperMethods(PsiMethod method, boolean checkAccess) {
    if (!canHaveSuperMethod(method, checkAccess, false)) return PsiMethod.EMPTY_ARRAY;
    return findSuperMethodsInternal(method, null);
  }

  /**
   * Finds the super methods of {@code method}, restricting the search to {@code parentClass}.
   */
  @NotNull
  public static PsiMethod[] findSuperMethods(PsiMethod method, PsiClass parentClass) {
    if (!canHaveSuperMethod(method, true, false)) return PsiMethod.EMPTY_ARRAY;
    return findSuperMethodsInternal(method, parentClass);
  }

  @NotNull
  private static PsiMethod[] findSuperMethodsInternal(PsiMethod method, PsiClass parentClass) {
    // Search signatures first, then unwrap them back to PsiMethod instances.
    List<MethodSignatureBackedByPsiMethod> outputMethods = findSuperMethodSignatures(method, parentClass, false);
    return MethodSignatureUtil.convertMethodSignaturesToMethods(outputMethods);
  }

  /**
   * Like {@link #findSuperMethods(PsiMethod, boolean)} but returns signatures
   * and also allows static methods in the search.
   */
  @NotNull
  public static List<MethodSignatureBackedByPsiMethod> findSuperMethodSignaturesIncludingStatic(PsiMethod method,
                                                                                               boolean checkAccess) {
    if (!canHaveSuperMethod(method, checkAccess, true)) return Collections.emptyList();
    return findSuperMethodSignatures(method, null, true);
  }

  @NotNull
  private static List<MethodSignatureBackedByPsiMethod> findSuperMethodSignatures(PsiMethod method,
                                                                                 PsiClass parentClass,
                                                                                 boolean allowStaticMethod) {
    return new ArrayList<MethodSignatureBackedByPsiMethod>(SuperMethodsSearch.search(method, parentClass, true, allowStaticMethod).findAll());
  }

  // A method can have super methods only if it is not a constructor, not static
  // (unless allowStaticMethod), not private (when checkAccess), and its
  // containing class exists and is not java.lang.Object.
  private static boolean canHaveSuperMethod(PsiMethod method, boolean checkAccess, boolean allowStaticMethod) {
    if (method.isConstructor()) return false;
    if (!allowStaticMethod && method.hasModifierProperty(PsiModifier.STATIC)) return false;
    if (checkAccess && method.hasModifierProperty(PsiModifier.PRIVATE)) return false;
    PsiClass parentClass = method.getContainingClass();
    return parentClass != null && !"java.lang.Object".equals(parentClass.getQualifiedName());
  }

  /**
   * Returns one deepest super method found by {@link DeepestSuperMethodsSearch},
   * or null when {@code method} cannot have super methods.
   */
  @Nullable
  public static PsiMethod findDeepestSuperMethod(PsiMethod method) {
    if (!canHaveSuperMethod(method, true, false)) return null;
    return DeepestSuperMethodsSearch.search(method).findFirst();
  }

  public
static PsiMethod[] findDeepestSuperMethods(PsiMethod method) { if (!canHaveSuperMethod(method, true, false)) return PsiMethod.EMPTY_ARRAY; Collection<PsiMethod> collection = DeepestSuperMethodsSearch.search(method).findAll(); return collection.toArray(new PsiMethod[collection.size()]); } private static Map<MethodSignature, HierarchicalMethodSignature> buildMethodHierarchy(PsiClass aClass, PsiSubstitutor substitutor, final boolean includePrivates, final Set<PsiClass> visited, boolean isInRawContext) { Map<MethodSignature, HierarchicalMethodSignature> result = new LinkedHashMap<MethodSignature, HierarchicalMethodSignature>(); final Map<MethodSignature, List<PsiMethod>> sameParameterErasureMethods = new THashMap<MethodSignature, List<PsiMethod>>(MethodSignatureUtil.METHOD_PARAMETERS_ERASURE_EQUALITY); Map<MethodSignature, HierarchicalMethodSignatureImpl> map = new THashMap<MethodSignature, HierarchicalMethodSignatureImpl>(new TObjectHashingStrategy<MethodSignature>() { public int computeHashCode(MethodSignature signature) { return MethodSignatureUtil.METHOD_PARAMETERS_ERASURE_EQUALITY.computeHashCode(signature); } public boolean equals(MethodSignature o1, MethodSignature o2) { if (!MethodSignatureUtil.METHOD_PARAMETERS_ERASURE_EQUALITY.equals(o1, o2)) return false; List<PsiMethod> list = sameParameterErasureMethods.get(o1); boolean toCheckReturnType = list != null && list.size() > 1; if (!toCheckReturnType) return true; PsiType returnType1 = ((MethodSignatureBackedByPsiMethod)o1).getMethod().getReturnType(); PsiType returnType2 = ((MethodSignatureBackedByPsiMethod)o2).getMethod().getReturnType(); if (returnType1 == null && returnType2 == null) return true; if (returnType1 == null || returnType2 == null) return false; PsiType erasure1 = TypeConversionUtil.erasure(o1.getSubstitutor().substitute(returnType1)); PsiType erasure2 = TypeConversionUtil.erasure(o2.getSubstitutor().substitute(returnType2)); return erasure1.equals(erasure2); } }); for (PsiMethod method : 
aClass.getMethods()) { LOG.assertTrue(method.isValid()); if (!includePrivates && method.hasModifierProperty(PsiModifier.PRIVATE)) continue; final MethodSignatureBackedByPsiMethod signature = MethodSignatureBackedByPsiMethod.create(method, substitutor, isInRawContext); HierarchicalMethodSignatureImpl newH = new HierarchicalMethodSignatureImpl(signature); List<PsiMethod> list = sameParameterErasureMethods.get(signature); if (list == null) { list = new SmartList<PsiMethod>(); sameParameterErasureMethods.put(signature, list); } list.add(method); LOG.assertTrue(newH.getMethod().isValid()); result.put(signature, newH); map.put(signature, newH); } for (PsiClassType superType : aClass.getSuperTypes()) { PsiClassType.ClassResolveResult superTypeResolveResult = superType.resolveGenerics(); PsiClass superClass = superTypeResolveResult.getElement(); if (superClass == null) continue; if (!visited.add(superClass)) continue; // cyclic inheritance final PsiSubstitutor superSubstitutor = superTypeResolveResult.getSubstitutor(); PsiSubstitutor finalSubstitutor = obtainFinalSubstitutor(superClass, superSubstitutor, substitutor, isInRawContext); final boolean isInRawContextSuper = (isInRawContext || PsiUtil.isRawSubstitutor(superClass, superSubstitutor)) && superClass.getTypeParameters().length != 0; Map<MethodSignature, HierarchicalMethodSignature> superResult = buildMethodHierarchy(superClass, finalSubstitutor, false, visited, isInRawContextSuper); visited.remove(superClass); List<Pair<MethodSignature, HierarchicalMethodSignature>> flattened = new ArrayList<Pair<MethodSignature, HierarchicalMethodSignature>>(); for (Map.Entry<MethodSignature, HierarchicalMethodSignature> entry : superResult.entrySet()) { HierarchicalMethodSignature hms = entry.getValue(); MethodSignature signature = entry.getKey(); PsiClass containingClass = hms.getMethod().getContainingClass(); List<HierarchicalMethodSignature> supers = new ArrayList<HierarchicalMethodSignature>(hms.getSuperSignatures()); for 
(HierarchicalMethodSignature aSuper : supers) { PsiClass superContainingClass = aSuper.getMethod().getContainingClass(); if (containingClass != null && superContainingClass != null && !containingClass.isInheritor(superContainingClass, true)) { // methods must be inherited from unrelated classes, so flatten hierarchy here // class C implements SAM1, SAM2 { void methodimpl() {} } //hms.getSuperSignatures().remove(aSuper); flattened.add(new Pair<MethodSignature, HierarchicalMethodSignature>(signature, aSuper)); } } putInMap(aClass, result, map, hms, signature); } for (Pair<MethodSignature, HierarchicalMethodSignature> pair : flattened) { putInMap(aClass, result, map, pair.second, pair.first); } } for (Map.Entry<MethodSignature, HierarchicalMethodSignatureImpl> entry : map.entrySet()) { HierarchicalMethodSignatureImpl hierarchicalMethodSignature = entry.getValue(); MethodSignature methodSignature = entry.getKey(); if (result.get(methodSignature) == null && PsiUtil.isAccessible(hierarchicalMethodSignature.getMethod(), aClass, aClass)) { LOG.assertTrue(hierarchicalMethodSignature.getMethod().isValid()); result.put(methodSignature, hierarchicalMethodSignature); } } return result; } private static void putInMap(PsiClass aClass, Map<MethodSignature, HierarchicalMethodSignature> result, Map<MethodSignature, HierarchicalMethodSignatureImpl> map, HierarchicalMethodSignature hierarchicalMethodSignature, MethodSignature signature) { if (!PsiUtil.isAccessible(hierarchicalMethodSignature.getMethod(), aClass, aClass)) return; HierarchicalMethodSignatureImpl existing = map.get(signature); if (existing == null) { HierarchicalMethodSignatureImpl copy = copy(hierarchicalMethodSignature); LOG.assertTrue(copy.getMethod().isValid()); map.put(signature, copy); } else if (isReturnTypeIsMoreSpecificThan(hierarchicalMethodSignature, existing) && isSuperMethod(aClass, hierarchicalMethodSignature, existing)) { HierarchicalMethodSignatureImpl newSuper = copy(hierarchicalMethodSignature); 
mergeSupers(newSuper, existing); LOG.assertTrue(newSuper.getMethod().isValid()); map.put(signature, newSuper); } else if (isSuperMethod(aClass, existing, hierarchicalMethodSignature)) { mergeSupers(existing, hierarchicalMethodSignature); } // just drop an invalid method declaration there - to highlight accordingly else if (!result.containsKey(signature)) { LOG.assertTrue(hierarchicalMethodSignature.getMethod().isValid()); result.put(signature, hierarchicalMethodSignature); } } private static boolean isReturnTypeIsMoreSpecificThan(@NotNull HierarchicalMethodSignature thisSig, @NotNull HierarchicalMethodSignature thatSig) { PsiType thisRet = thisSig.getMethod().getReturnType(); PsiType thatRet = thatSig.getMethod().getReturnType(); return thatRet != null && thisRet != null && !thatRet.equals(thisRet) && TypeConversionUtil.isAssignable(thatRet, thisRet); } private static void mergeSupers(final HierarchicalMethodSignatureImpl existing, final HierarchicalMethodSignature superSignature) { for (HierarchicalMethodSignature existingSuper : existing.getSuperSignatures()) { if (existingSuper.getMethod() == superSignature.getMethod()) { for (HierarchicalMethodSignature signature : superSignature.getSuperSignatures()) { mergeSupers((HierarchicalMethodSignatureImpl)existingSuper, signature); } return; } } if (existing.getMethod() == superSignature.getMethod()) { List<HierarchicalMethodSignature> existingSupers = existing.getSuperSignatures(); for (HierarchicalMethodSignature supers : superSignature.getSuperSignatures()) { if (!existingSupers.contains(supers)) existing.addSuperSignature(copy(supers)); } } else { HierarchicalMethodSignatureImpl copy = copy(superSignature); existing.addSuperSignature(copy); } } private static boolean isSuperMethod(PsiClass aClass, HierarchicalMethodSignature hierarchicalMethodSignature, HierarchicalMethodSignature superSignatureHierarchical) { PsiMethod superMethod = superSignatureHierarchical.getMethod(); PsiClass superClass = 
superMethod.getContainingClass(); PsiClass containingClass = hierarchicalMethodSignature.getMethod().getContainingClass(); return !superMethod.isConstructor() && !aClass.equals(superClass) && PsiUtil.isAccessible(superMethod, aClass, aClass) && MethodSignatureUtil.isSubsignature(superSignatureHierarchical, hierarchicalMethodSignature) && superClass != null && (containingClass != null && containingClass.isInterface() == superClass.isInterface() || superClass.isInterface() || "java.lang.Object".equals(superClass.getQualifiedName())) ; } private static HierarchicalMethodSignatureImpl copy(HierarchicalMethodSignature hi) { HierarchicalMethodSignatureImpl hierarchicalMethodSignature = new HierarchicalMethodSignatureImpl(hi); for (HierarchicalMethodSignature his : hi.getSuperSignatures()) { hierarchicalMethodSignature.addSuperSignature(copy(his)); } return hierarchicalMethodSignature; } private static PsiSubstitutor obtainFinalSubstitutor(PsiClass superClass, PsiSubstitutor superSubstitutor, PsiSubstitutor derivedSubstitutor, boolean inRawContext) { if (inRawContext) { superSubstitutor = JavaPsiFacadeEx.getElementFactory(superClass.getProject()).createRawSubstitutor(derivedSubstitutor, superSubstitutor.getSubstitutionMap().keySet().toArray(PsiTypeParameter.EMPTY_ARRAY)); } Map<PsiTypeParameter, PsiType> map = null; for (PsiTypeParameter typeParameter : PsiUtil.typeParametersIterable(superClass)) { PsiType type = superSubstitutor.substitute(typeParameter); final PsiType t = derivedSubstitutor.substitute(type); if (map == null) { map = new THashMap<PsiTypeParameter, PsiType>(); } map.put(typeParameter, t); } return map == null ? 
PsiSubstitutor.EMPTY : JavaPsiFacade.getInstance(superClass.getProject()).getElementFactory().createSubstitutor(map); } public static Collection<HierarchicalMethodSignature> getVisibleSignatures(PsiClass aClass) { Map<MethodSignature, HierarchicalMethodSignature> map = getSignaturesMap(aClass); return map.values(); } @NotNull public static HierarchicalMethodSignature getHierarchicalMethodSignature(PsiMethod method) { PsiClass aClass = method.getContainingClass(); HierarchicalMethodSignature result = null; if (aClass != null) { result = getSignaturesMap(aClass).get(method.getSignature(PsiSubstitutor.EMPTY)); } if (result == null) { result = new HierarchicalMethodSignatureImpl((MethodSignatureBackedByPsiMethod)method.getSignature(PsiSubstitutor.EMPTY)); } return result; } private static Map<MethodSignature, HierarchicalMethodSignature> getSignaturesMap(final PsiClass aClass) { return SIGNATURES_KEY.getValue(aClass); } // uses hierarchy signature tree if available, traverses class structure by itself otherwise public static boolean processDirectSuperMethodsSmart(@NotNull PsiMethod method, @NotNull Processor<PsiMethod> superMethodProcessor) { //boolean old = PsiSuperMethodUtil.isSuperMethod(method, superMethod); PsiClass aClass = method.getContainingClass(); if (aClass == null) return false; if (!canHaveSuperMethod(method, true, false)) return false; Map<MethodSignature, HierarchicalMethodSignature> cachedMap = SIGNATURES_KEY.getCachedValueOrNull(aClass); if (cachedMap != null) { HierarchicalMethodSignature signature = cachedMap.get(method.getSignature(PsiSubstitutor.EMPTY)); if (signature != null) { List<HierarchicalMethodSignature> superSignatures = signature.getSuperSignatures(); for (HierarchicalMethodSignature superSignature : superSignatures) { if (!superMethodProcessor.process(superSignature.getMethod())) return false; } return true; } } PsiClassType[] directSupers = aClass.getSuperTypes(); for (PsiClassType directSuper : directSupers) { 
PsiClassType.ClassResolveResult resolveResult = directSuper.resolveGenerics(); if (resolveResult.getSubstitutor() != PsiSubstitutor.EMPTY) { // generics break; } PsiClass directSuperClass = resolveResult.getElement(); if (directSuperClass == null) continue; PsiMethod[] candidates = directSuperClass.findMethodsBySignature(method, false); for (PsiMethod candidate : candidates) { if (PsiUtil.canBeOverriden(candidate)) { if (!superMethodProcessor.process(candidate)) return false; } } return true; } List<HierarchicalMethodSignature> superSignatures = method.getHierarchicalMethodSignature().getSuperSignatures(); for (HierarchicalMethodSignature superSignature : superSignatures) { if (!superMethodProcessor.process(superSignature.getMethod())) return false; } return true; } // uses hierarchy signature tree if available, traverses class structure by itself otherwise public static boolean isSuperMethodSmart(@NotNull PsiMethod method, @NotNull PsiMethod superMethod) { //boolean old = PsiSuperMethodUtil.isSuperMethod(method, superMethod); if (method == superMethod) return false; PsiClass aClass = method.getContainingClass(); PsiClass superClass = superMethod.getContainingClass(); if (aClass == null || superClass == null || superClass == aClass) return false; if (!canHaveSuperMethod(method, true, false)) return false; PsiMethod[] superMethods = null; Map<MethodSignature, HierarchicalMethodSignature> cachedMap = SIGNATURES_KEY.getCachedValueOrNull(aClass); if (cachedMap != null) { HierarchicalMethodSignature signature = cachedMap.get(method.getSignature(PsiSubstitutor.EMPTY)); if (signature != null) { superMethods = MethodSignatureUtil.convertMethodSignaturesToMethods(signature.getSuperSignatures()); } } if (superMethods == null) { PsiClassType[] directSupers = aClass.getSuperTypes(); List<PsiMethod> found = null; boolean canceled = false; for (PsiClassType directSuper : directSupers) { PsiClassType.ClassResolveResult resolveResult = directSuper.resolveGenerics(); if 
(resolveResult.getSubstitutor() != PsiSubstitutor.EMPTY) { // generics canceled = true; break; } PsiClass directSuperClass = resolveResult.getElement(); if (directSuperClass == null) continue; PsiMethod[] candidates = directSuperClass.findMethodsBySignature(method, false); if (candidates.length != 0) { if (found == null) found = new ArrayList<PsiMethod>(); for (PsiMethod candidate : candidates) { if (PsiUtil.canBeOverriden(candidate)) found.add(candidate); } } } superMethods = canceled ? null : found == null ? PsiMethod.EMPTY_ARRAY : found.toArray(new PsiMethod[found.size()]); } if (superMethods == null) { superMethods = MethodSignatureUtil.convertMethodSignaturesToMethods(method.getHierarchicalMethodSignature().getSuperSignatures()); } for (PsiMethod superCandidate : superMethods) { if (superMethod.equals(superCandidate) || isSuperMethodSmart(superCandidate, superMethod)) return true; } return false; } }
java/java-impl/src/com/intellij/psi/impl/PsiSuperMethodImplUtil.java
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.impl; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Pair; import com.intellij.psi.*; import com.intellij.psi.impl.source.HierarchicalMethodSignatureImpl; import com.intellij.psi.search.searches.DeepestSuperMethodsSearch; import com.intellij.psi.search.searches.SuperMethodsSearch; import com.intellij.psi.util.*; import com.intellij.util.NotNullFunction; import com.intellij.util.Processor; import com.intellij.util.SmartList; import gnu.trove.THashMap; import gnu.trove.THashSet; import gnu.trove.TObjectHashingStrategy; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; public class PsiSuperMethodImplUtil { private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.PsiSuperMethodImplUtil"); private static final PsiCacheKey<Map<MethodSignature, HierarchicalMethodSignature>, PsiClass> SIGNATURES_KEY = PsiCacheKey .create("SIGNATURES_KEY", new NotNullFunction<PsiClass, Map<MethodSignature, HierarchicalMethodSignature>>() { @NotNull @Override public Map<MethodSignature, HierarchicalMethodSignature> fun(PsiClass dom) { return buildMethodHierarchy(dom, PsiSubstitutor.EMPTY, true, new THashSet<PsiClass>(), false); } }); private PsiSuperMethodImplUtil() { } @NotNull public static PsiMethod[] findSuperMethods(PsiMethod method) { return findSuperMethods(method, null); } 
@NotNull public static PsiMethod[] findSuperMethods(PsiMethod method, boolean checkAccess) { if (!canHaveSuperMethod(method, checkAccess, false)) return PsiMethod.EMPTY_ARRAY; return findSuperMethodsInternal(method, null); } @NotNull public static PsiMethod[] findSuperMethods(PsiMethod method, PsiClass parentClass) { if (!canHaveSuperMethod(method, true, false)) return PsiMethod.EMPTY_ARRAY; return findSuperMethodsInternal(method, parentClass); } @NotNull private static PsiMethod[] findSuperMethodsInternal(PsiMethod method, PsiClass parentClass) { List<MethodSignatureBackedByPsiMethod> outputMethods = findSuperMethodSignatures(method, parentClass, false); return MethodSignatureUtil.convertMethodSignaturesToMethods(outputMethods); } @NotNull public static List<MethodSignatureBackedByPsiMethod> findSuperMethodSignaturesIncludingStatic(PsiMethod method, boolean checkAccess) { if (!canHaveSuperMethod(method, checkAccess, true)) return Collections.emptyList(); return findSuperMethodSignatures(method, null, true); } @NotNull private static List<MethodSignatureBackedByPsiMethod> findSuperMethodSignatures(PsiMethod method, PsiClass parentClass, boolean allowStaticMethod) { return new ArrayList<MethodSignatureBackedByPsiMethod>(SuperMethodsSearch.search(method, parentClass, true, allowStaticMethod).findAll()); } private static boolean canHaveSuperMethod(PsiMethod method, boolean checkAccess, boolean allowStaticMethod) { if (method.isConstructor()) return false; if (!allowStaticMethod && method.hasModifierProperty(PsiModifier.STATIC)) return false; if (checkAccess && method.hasModifierProperty(PsiModifier.PRIVATE)) return false; PsiClass parentClass = method.getContainingClass(); return parentClass != null && !"java.lang.Object".equals(parentClass.getQualifiedName()); } @Nullable public static PsiMethod findDeepestSuperMethod(PsiMethod method) { if (!canHaveSuperMethod(method, true, false)) return null; return DeepestSuperMethodsSearch.search(method).findFirst(); } public 
static PsiMethod[] findDeepestSuperMethods(PsiMethod method) { if (!canHaveSuperMethod(method, true, false)) return PsiMethod.EMPTY_ARRAY; Collection<PsiMethod> collection = DeepestSuperMethodsSearch.search(method).findAll(); return collection.toArray(new PsiMethod[collection.size()]); } private static Map<MethodSignature, HierarchicalMethodSignature> buildMethodHierarchy(PsiClass aClass, PsiSubstitutor substitutor, final boolean includePrivates, final Set<PsiClass> visited, boolean isInRawContext) { Map<MethodSignature, HierarchicalMethodSignature> result = new LinkedHashMap<MethodSignature, HierarchicalMethodSignature>(); final Map<MethodSignature, List<PsiMethod>> sameParameterErasureMethods = new THashMap<MethodSignature, List<PsiMethod>>(MethodSignatureUtil.METHOD_PARAMETERS_ERASURE_EQUALITY); Map<MethodSignature, HierarchicalMethodSignatureImpl> map = new THashMap<MethodSignature, HierarchicalMethodSignatureImpl>(new TObjectHashingStrategy<MethodSignature>() { public int computeHashCode(MethodSignature signature) { return MethodSignatureUtil.METHOD_PARAMETERS_ERASURE_EQUALITY.computeHashCode(signature); } public boolean equals(MethodSignature o1, MethodSignature o2) { if (!MethodSignatureUtil.METHOD_PARAMETERS_ERASURE_EQUALITY.equals(o1, o2)) return false; List<PsiMethod> list = sameParameterErasureMethods.get(o1); boolean toCheckReturnType = list != null && list.size() > 1; if (!toCheckReturnType) return true; PsiType returnType1 = ((MethodSignatureBackedByPsiMethod)o1).getMethod().getReturnType(); PsiType returnType2 = ((MethodSignatureBackedByPsiMethod)o2).getMethod().getReturnType(); if (returnType1 == null && returnType2 == null) return true; if (returnType1 == null || returnType2 == null) return false; PsiType erasure1 = TypeConversionUtil.erasure(o1.getSubstitutor().substitute(returnType1)); PsiType erasure2 = TypeConversionUtil.erasure(o2.getSubstitutor().substitute(returnType2)); return erasure1.equals(erasure2); } }); for (PsiMethod method : 
aClass.getMethods()) { LOG.assertTrue(method.isValid()); if (!includePrivates && method.hasModifierProperty(PsiModifier.PRIVATE)) continue; final MethodSignatureBackedByPsiMethod signature = MethodSignatureBackedByPsiMethod.create(method, substitutor, isInRawContext); HierarchicalMethodSignatureImpl newH = new HierarchicalMethodSignatureImpl(signature); List<PsiMethod> list = sameParameterErasureMethods.get(signature); if (list == null) { list = new SmartList<PsiMethod>(); sameParameterErasureMethods.put(signature, list); } list.add(method); LOG.assertTrue(newH.getMethod().isValid()); result.put(signature, newH); map.put(signature, newH); } for (PsiClassType superType : aClass.getSuperTypes()) { PsiClassType.ClassResolveResult superTypeResolveResult = superType.resolveGenerics(); PsiClass superClass = superTypeResolveResult.getElement(); if (superClass == null) continue; if (!visited.add(superClass)) continue; // cyclic inheritance final PsiSubstitutor superSubstitutor = superTypeResolveResult.getSubstitutor(); PsiSubstitutor finalSubstitutor = obtainFinalSubstitutor(superClass, superSubstitutor, substitutor, isInRawContext); final boolean isInRawContextSuper = (isInRawContext || PsiUtil.isRawSubstitutor(superClass, superSubstitutor)) && superClass.getTypeParameters().length != 0; Map<MethodSignature, HierarchicalMethodSignature> superResult = buildMethodHierarchy(superClass, finalSubstitutor, false, visited, isInRawContextSuper); visited.remove(superClass); List<Pair<MethodSignature, HierarchicalMethodSignature>> flattened = new ArrayList<Pair<MethodSignature, HierarchicalMethodSignature>>(); for (Map.Entry<MethodSignature, HierarchicalMethodSignature> entry : superResult.entrySet()) { HierarchicalMethodSignature hms = entry.getValue(); MethodSignature signature = entry.getKey(); PsiClass containingClass = hms.getMethod().getContainingClass(); List<HierarchicalMethodSignature> supers = new ArrayList<HierarchicalMethodSignature>(hms.getSuperSignatures()); for 
(HierarchicalMethodSignature aSuper : supers) { PsiClass superContainingClass = aSuper.getMethod().getContainingClass(); if (containingClass != null && superContainingClass != null && !containingClass.isInheritor(superContainingClass, true)) { // methods must be inherited from unrelated classes, so flatten hierarchy here // class C implements SAM1, SAM2 { void methodimpl() {} } //hms.getSuperSignatures().remove(aSuper); flattened.add(new Pair<MethodSignature, HierarchicalMethodSignature>(signature, aSuper)); } } putInMap(aClass, result, map, hms, signature); } for (Pair<MethodSignature, HierarchicalMethodSignature> pair : flattened) { putInMap(aClass, result, map, pair.second, pair.first); } } for (Map.Entry<MethodSignature, HierarchicalMethodSignatureImpl> entry : map.entrySet()) { HierarchicalMethodSignatureImpl hierarchicalMethodSignature = entry.getValue(); MethodSignature methodSignature = entry.getKey(); if (result.get(methodSignature) == null && PsiUtil.isAccessible(hierarchicalMethodSignature.getMethod(), aClass, aClass)) { LOG.assertTrue(hierarchicalMethodSignature.getMethod().isValid()); result.put(methodSignature, hierarchicalMethodSignature); } } return result; } private static void putInMap(PsiClass aClass, Map<MethodSignature, HierarchicalMethodSignature> result, Map<MethodSignature, HierarchicalMethodSignatureImpl> map, HierarchicalMethodSignature hierarchicalMethodSignature, MethodSignature signature) { if (!PsiUtil.isAccessible(hierarchicalMethodSignature.getMethod(), aClass, aClass)) return; HierarchicalMethodSignatureImpl existing = map.get(signature); if (existing == null) { map.put(signature, copy(hierarchicalMethodSignature)); } else if (isReturnTypeIsMoreSpecificThan(hierarchicalMethodSignature, existing) && isSuperMethod(aClass, hierarchicalMethodSignature, existing)) { HierarchicalMethodSignatureImpl newSuper = copy(hierarchicalMethodSignature); mergeSupers(newSuper, existing); map.put(signature, newSuper); } else if (isSuperMethod(aClass, 
existing, hierarchicalMethodSignature)) { mergeSupers(existing, hierarchicalMethodSignature); } // just drop an invalid method declaration there - to highlight accordingly else if (!result.containsKey(signature)) { LOG.assertTrue(hierarchicalMethodSignature.getMethod().isValid()); result.put(signature, hierarchicalMethodSignature); } } private static boolean isReturnTypeIsMoreSpecificThan(@NotNull HierarchicalMethodSignature thisSig, @NotNull HierarchicalMethodSignature thatSig) { PsiType thisRet = thisSig.getMethod().getReturnType(); PsiType thatRet = thatSig.getMethod().getReturnType(); return thatRet != null && thisRet != null && !thatRet.equals(thisRet) && TypeConversionUtil.isAssignable(thatRet, thisRet); } private static void mergeSupers(final HierarchicalMethodSignatureImpl existing, final HierarchicalMethodSignature superSignature) { for (HierarchicalMethodSignature existingSuper : existing.getSuperSignatures()) { if (existingSuper.getMethod() == superSignature.getMethod()) { for (HierarchicalMethodSignature signature : superSignature.getSuperSignatures()) { mergeSupers((HierarchicalMethodSignatureImpl)existingSuper, signature); } return; } } if (existing.getMethod() == superSignature.getMethod()) { List<HierarchicalMethodSignature> existingSupers = existing.getSuperSignatures(); for (HierarchicalMethodSignature supers : superSignature.getSuperSignatures()) { if (!existingSupers.contains(supers)) existing.addSuperSignature(copy(supers)); } } else { HierarchicalMethodSignatureImpl copy = copy(superSignature); existing.addSuperSignature(copy); } } private static boolean isSuperMethod(PsiClass aClass, HierarchicalMethodSignature hierarchicalMethodSignature, HierarchicalMethodSignature superSignatureHierarchical) { PsiMethod superMethod = superSignatureHierarchical.getMethod(); PsiClass superClass = superMethod.getContainingClass(); PsiClass containingClass = hierarchicalMethodSignature.getMethod().getContainingClass(); return !superMethod.isConstructor() && 
!aClass.equals(superClass) && PsiUtil.isAccessible(superMethod, aClass, aClass) && MethodSignatureUtil.isSubsignature(superSignatureHierarchical, hierarchicalMethodSignature) && superClass != null && (containingClass != null && containingClass.isInterface() == superClass.isInterface() || superClass.isInterface() || "java.lang.Object".equals(superClass.getQualifiedName())) ; } private static HierarchicalMethodSignatureImpl copy(HierarchicalMethodSignature hi) { HierarchicalMethodSignatureImpl hierarchicalMethodSignature = new HierarchicalMethodSignatureImpl(hi); for (HierarchicalMethodSignature his : hi.getSuperSignatures()) { hierarchicalMethodSignature.addSuperSignature(copy(his)); } return hierarchicalMethodSignature; } private static PsiSubstitutor obtainFinalSubstitutor(PsiClass superClass, PsiSubstitutor superSubstitutor, PsiSubstitutor derivedSubstitutor, boolean inRawContext) { if (inRawContext) { superSubstitutor = JavaPsiFacadeEx.getElementFactory(superClass.getProject()).createRawSubstitutor(derivedSubstitutor, superSubstitutor.getSubstitutionMap().keySet().toArray(PsiTypeParameter.EMPTY_ARRAY)); } Map<PsiTypeParameter, PsiType> map = null; for (PsiTypeParameter typeParameter : PsiUtil.typeParametersIterable(superClass)) { PsiType type = superSubstitutor.substitute(typeParameter); final PsiType t = derivedSubstitutor.substitute(type); if (map == null) { map = new THashMap<PsiTypeParameter, PsiType>(); } map.put(typeParameter, t); } return map == null ? 
PsiSubstitutor.EMPTY : JavaPsiFacade.getInstance(superClass.getProject()).getElementFactory().createSubstitutor(map); } public static Collection<HierarchicalMethodSignature> getVisibleSignatures(PsiClass aClass) { Map<MethodSignature, HierarchicalMethodSignature> map = getSignaturesMap(aClass); return map.values(); } @NotNull public static HierarchicalMethodSignature getHierarchicalMethodSignature(PsiMethod method) { PsiClass aClass = method.getContainingClass(); HierarchicalMethodSignature result = null; if (aClass != null) { result = getSignaturesMap(aClass).get(method.getSignature(PsiSubstitutor.EMPTY)); } if (result == null) { result = new HierarchicalMethodSignatureImpl((MethodSignatureBackedByPsiMethod)method.getSignature(PsiSubstitutor.EMPTY)); } return result; } private static Map<MethodSignature, HierarchicalMethodSignature> getSignaturesMap(final PsiClass aClass) { return SIGNATURES_KEY.getValue(aClass); } // uses hierarchy signature tree if available, traverses class structure by itself otherwise public static boolean processDirectSuperMethodsSmart(@NotNull PsiMethod method, @NotNull Processor<PsiMethod> superMethodProcessor) { //boolean old = PsiSuperMethodUtil.isSuperMethod(method, superMethod); PsiClass aClass = method.getContainingClass(); if (aClass == null) return false; if (!canHaveSuperMethod(method, true, false)) return false; Map<MethodSignature, HierarchicalMethodSignature> cachedMap = SIGNATURES_KEY.getCachedValueOrNull(aClass); if (cachedMap != null) { HierarchicalMethodSignature signature = cachedMap.get(method.getSignature(PsiSubstitutor.EMPTY)); if (signature != null) { List<HierarchicalMethodSignature> superSignatures = signature.getSuperSignatures(); for (HierarchicalMethodSignature superSignature : superSignatures) { if (!superMethodProcessor.process(superSignature.getMethod())) return false; } return true; } } PsiClassType[] directSupers = aClass.getSuperTypes(); for (PsiClassType directSuper : directSupers) { 
PsiClassType.ClassResolveResult resolveResult = directSuper.resolveGenerics(); if (resolveResult.getSubstitutor() != PsiSubstitutor.EMPTY) { // generics break; } PsiClass directSuperClass = resolveResult.getElement(); if (directSuperClass == null) continue; PsiMethod[] candidates = directSuperClass.findMethodsBySignature(method, false); for (PsiMethod candidate : candidates) { if (PsiUtil.canBeOverriden(candidate)) { if (!superMethodProcessor.process(candidate)) return false; } } return true; } List<HierarchicalMethodSignature> superSignatures = method.getHierarchicalMethodSignature().getSuperSignatures(); for (HierarchicalMethodSignature superSignature : superSignatures) { if (!superMethodProcessor.process(superSignature.getMethod())) return false; } return true; } // uses hierarchy signature tree if available, traverses class structure by itself otherwise public static boolean isSuperMethodSmart(@NotNull PsiMethod method, @NotNull PsiMethod superMethod) { //boolean old = PsiSuperMethodUtil.isSuperMethod(method, superMethod); if (method == superMethod) return false; PsiClass aClass = method.getContainingClass(); PsiClass superClass = superMethod.getContainingClass(); if (aClass == null || superClass == null || superClass == aClass) return false; if (!canHaveSuperMethod(method, true, false)) return false; PsiMethod[] superMethods = null; Map<MethodSignature, HierarchicalMethodSignature> cachedMap = SIGNATURES_KEY.getCachedValueOrNull(aClass); if (cachedMap != null) { HierarchicalMethodSignature signature = cachedMap.get(method.getSignature(PsiSubstitutor.EMPTY)); if (signature != null) { superMethods = MethodSignatureUtil.convertMethodSignaturesToMethods(signature.getSuperSignatures()); } } if (superMethods == null) { PsiClassType[] directSupers = aClass.getSuperTypes(); List<PsiMethod> found = null; boolean canceled = false; for (PsiClassType directSuper : directSupers) { PsiClassType.ClassResolveResult resolveResult = directSuper.resolveGenerics(); if 
(resolveResult.getSubstitutor() != PsiSubstitutor.EMPTY) { // generics canceled = true; break; } PsiClass directSuperClass = resolveResult.getElement(); if (directSuperClass == null) continue; PsiMethod[] candidates = directSuperClass.findMethodsBySignature(method, false); if (candidates.length != 0) { if (found == null) found = new ArrayList<PsiMethod>(); for (PsiMethod candidate : candidates) { if (PsiUtil.canBeOverriden(candidate)) found.add(candidate); } } } superMethods = canceled ? null : found == null ? PsiMethod.EMPTY_ARRAY : found.toArray(new PsiMethod[found.size()]); } if (superMethods == null) { superMethods = MethodSignatureUtil.convertMethodSignaturesToMethods(method.getHierarchicalMethodSignature().getSuperSignatures()); } for (PsiMethod superCandidate : superMethods) { if (superMethod.equals(superCandidate) || isSuperMethodSmart(superCandidate, superMethod)) return true; } return false; } }
method signature validity assertions for EA-26920
java/java-impl/src/com/intellij/psi/impl/PsiSuperMethodImplUtil.java
method signature validity assertions for EA-26920
Java
apache-2.0
54ae0a557d07128ce36f3b5681c69dde02cf35c4
0
loconsolutions/elasticsearch,F0lha/elasticsearch,ThiagoGarciaAlves/elasticsearch,ZTE-PaaS/elasticsearch,EasonYi/elasticsearch,kenshin233/elasticsearch,wenpos/elasticsearch,AndreKR/elasticsearch,mjason3/elasticsearch,NBSW/elasticsearch,Fsero/elasticsearch,himanshuag/elasticsearch,TonyChai24/ESSource,HarishAtGitHub/elasticsearch,TonyChai24/ESSource,bestwpw/elasticsearch,cnfire/elasticsearch-1,Liziyao/elasticsearch,jsgao0/elasticsearch,ulkas/elasticsearch,njlawton/elasticsearch,elancom/elasticsearch,iantruslove/elasticsearch,dylan8902/elasticsearch,mohit/elasticsearch,Shekharrajak/elasticsearch,MaineC/elasticsearch,gingerwizard/elasticsearch,infusionsoft/elasticsearch,ZTE-PaaS/elasticsearch,MaineC/elasticsearch,bawse/elasticsearch,mute/elasticsearch,YosuaMichael/elasticsearch,petabytedata/elasticsearch,achow/elasticsearch,smflorentino/elasticsearch,sposam/elasticsearch,iacdingping/elasticsearch,caengcjd/elasticsearch,kalburgimanjunath/elasticsearch,knight1128/elasticsearch,hanswang/elasticsearch,humandb/elasticsearch,acchen97/elasticsearch,MjAbuz/elasticsearch,ivansun1010/elasticsearch,kunallimaye/elasticsearch,nrkkalyan/elasticsearch,JackyMai/elasticsearch,jbertouch/elasticsearch,yuy168/elasticsearch,Brijeshrpatel9/elasticsearch,GlenRSmith/elasticsearch,jchampion/elasticsearch,lightslife/elasticsearch,smflorentino/elasticsearch,scottsom/elasticsearch,hydro2k/elasticsearch,kevinkluge/elasticsearch,i-am-Nathan/elasticsearch,huypx1292/elasticsearch,hirdesh2008/elasticsearch,LewayneNaidoo/elasticsearch,thecocce/elasticsearch,rmuir/elasticsearch,kenshin233/elasticsearch,jchampion/elasticsearch,Shepard1212/elasticsearch,apepper/elasticsearch,jw0201/elastic,springning/elasticsearch,mm0/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,sdauletau/elasticsearch,tsohil/elasticsearch,franklanganke/elasticsearch,JSCooke/elasticsearch,mohit/elasticsearch,kcompher/elasticsearch,markwalkom/elasticsearch,iantruslove/elasticsearch,jprante/elasticsearch,ydsakyclguozi/elasticsearch,dyla
n8902/elasticsearch,shreejay/elasticsearch,amit-shar/elasticsearch,cwurm/elasticsearch,NBSW/elasticsearch,trangvh/elasticsearch,amit-shar/elasticsearch,mgalushka/elasticsearch,kimimj/elasticsearch,vvcephei/elasticsearch,YosuaMichael/elasticsearch,a2lin/elasticsearch,mmaracic/elasticsearch,djschny/elasticsearch,HarishAtGitHub/elasticsearch,lmtwga/elasticsearch,KimTaehee/elasticsearch,springning/elasticsearch,ThalaivaStars/OrgRepo1,truemped/elasticsearch,Liziyao/elasticsearch,Helen-Zhao/elasticsearch,spiegela/elasticsearch,easonC/elasticsearch,rlugojr/elasticsearch,sposam/elasticsearch,franklanganke/elasticsearch,liweinan0423/elasticsearch,mm0/elasticsearch,henakamaMSFT/elasticsearch,andrestc/elasticsearch,zeroctu/elasticsearch,Brijeshrpatel9/elasticsearch,iamjakob/elasticsearch,btiernay/elasticsearch,markharwood/elasticsearch,sreeramjayan/elasticsearch,iacdingping/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,mkis-/elasticsearch,infusionsoft/elasticsearch,MaineC/elasticsearch,hechunwen/elasticsearch,mikemccand/elasticsearch,naveenhooda2000/elasticsearch,wuranbo/elasticsearch,i-am-Nathan/elasticsearch,HarishAtGitHub/elasticsearch,likaiwalkman/elasticsearch,bawse/elasticsearch,skearns64/elasticsearch,tsohil/elasticsearch,alexshadow007/elasticsearch,pozhidaevak/elasticsearch,qwerty4030/elasticsearch,knight1128/elasticsearch,aglne/elasticsearch,diendt/elasticsearch,liweinan0423/elasticsearch,hanswang/elasticsearch,gingerwizard/elasticsearch,fooljohnny/elasticsearch,MetSystem/elasticsearch,wangtuo/elasticsearch,vingupta3/elasticsearch,hafkensite/elasticsearch,sdauletau/elasticsearch,acchen97/elasticsearch,szroland/elasticsearch,dylan8902/elasticsearch,MaineC/elasticsearch,zkidkid/elasticsearch,dpursehouse/elasticsearch,jprante/elasticsearch,18098924759/elasticsearch,fernandozhu/elasticsearch,hydro2k/elasticsearch,adrianbk/elasticsearch,markharwood/elasticsearch,fforbeck/elasticsearch,wbowling/elasticsearch,loconsolutions/elasticsearch,Collaborne/elasticsearch,mortons
ykes/elasticsearch,kenshin233/elasticsearch,wayeast/elasticsearch,hirdesh2008/elasticsearch,naveenhooda2000/elasticsearch,davidvgalbraith/elasticsearch,yanjunh/elasticsearch,sposam/elasticsearch,Liziyao/elasticsearch,schonfeld/elasticsearch,zhiqinghuang/elasticsearch,fooljohnny/elasticsearch,GlenRSmith/elasticsearch,strapdata/elassandra5-rc,njlawton/elasticsearch,socialrank/elasticsearch,hirdesh2008/elasticsearch,xpandan/elasticsearch,weipinghe/elasticsearch,pozhidaevak/elasticsearch,F0lha/elasticsearch,sarwarbhuiyan/elasticsearch,mjhennig/elasticsearch,petabytedata/elasticsearch,Shekharrajak/elasticsearch,fekaputra/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,mjhennig/elasticsearch,springning/elasticsearch,s1monw/elasticsearch,Helen-Zhao/elasticsearch,lchennup/elasticsearch,wimvds/elasticsearch,markwalkom/elasticsearch,shreejay/elasticsearch,vingupta3/elasticsearch,mnylen/elasticsearch,btiernay/elasticsearch,palecur/elasticsearch,robin13/elasticsearch,clintongormley/elasticsearch,jango2015/elasticsearch,bestwpw/elasticsearch,geidies/elasticsearch,spiegela/elasticsearch,henakamaMSFT/elasticsearch,huypx1292/elasticsearch,wuranbo/elasticsearch,YosuaMichael/elasticsearch,fforbeck/elasticsearch,jeteve/elasticsearch,JSCooke/elasticsearch,tsohil/elasticsearch,amaliujia/elasticsearch,pritishppai/elasticsearch,jeteve/elasticsearch,jpountz/elasticsearch,MetSystem/elasticsearch,trangvh/elasticsearch,tkssharma/elasticsearch,gingerwizard/elasticsearch,acchen97/elasticsearch,fooljohnny/elasticsearch,hafkensite/elasticsearch,andrejserafim/elasticsearch,caengcjd/elasticsearch,SergVro/elasticsearch,infusionsoft/elasticsearch,amaliujia/elasticsearch,Kakakakakku/elasticsearch,jpountz/elasticsearch,scorpionvicky/elasticsearch,fred84/elasticsearch,Flipkart/elasticsearch,wimvds/elasticsearch,schonfeld/elasticsearch,JervyShi/elasticsearch,EasonYi/elasticsearch,Fsero/elasticsearch,beiske/elasticsearch,slavau/elasticsearch,adrianbk/elasticsearch,aglne/elasticsearch,wbowling/elasticse
arch,episerver/elasticsearch,ckclark/elasticsearch,andrejserafim/elasticsearch,cnfire/elasticsearch-1,alexshadow007/elasticsearch,nazarewk/elasticsearch,wayeast/elasticsearch,linglaiyao1314/elasticsearch,wangtuo/elasticsearch,Brijeshrpatel9/elasticsearch,obourgain/elasticsearch,zeroctu/elasticsearch,luiseduardohdbackup/elasticsearch,obourgain/elasticsearch,luiseduardohdbackup/elasticsearch,mnylen/elasticsearch,nezirus/elasticsearch,mcku/elasticsearch,infusionsoft/elasticsearch,lks21c/elasticsearch,Liziyao/elasticsearch,mute/elasticsearch,pritishppai/elasticsearch,karthikjaps/elasticsearch,khiraiwa/elasticsearch,mbrukman/elasticsearch,KimTaehee/elasticsearch,sdauletau/elasticsearch,LeoYao/elasticsearch,jango2015/elasticsearch,yuy168/elasticsearch,lzo/elasticsearch-1,lks21c/elasticsearch,alexkuk/elasticsearch,queirozfcom/elasticsearch,YosuaMichael/elasticsearch,davidvgalbraith/elasticsearch,tebriel/elasticsearch,HarishAtGitHub/elasticsearch,fforbeck/elasticsearch,onegambler/elasticsearch,wangyuxue/elasticsearch,njlawton/elasticsearch,xuzha/elasticsearch,nellicus/elasticsearch,zhiqinghuang/elasticsearch,schonfeld/elasticsearch,kalburgimanjunath/elasticsearch,strapdata/elassandra,HarishAtGitHub/elasticsearch,Shepard1212/elasticsearch,linglaiyao1314/elasticsearch,tahaemin/elasticsearch,vietlq/elasticsearch,queirozfcom/elasticsearch,aglne/elasticsearch,jimhooker2002/elasticsearch,andrestc/elasticsearch,kingaj/elasticsearch,golubev/elasticsearch,khiraiwa/elasticsearch,ricardocerq/elasticsearch,rhoml/elasticsearch,mnylen/elasticsearch,alexkuk/elasticsearch,yanjunh/elasticsearch,andrejserafim/elasticsearch,smflorentino/elasticsearch,rento19962/elasticsearch,franklanganke/elasticsearch,koxa29/elasticsearch,Chhunlong/elasticsearch,kubum/elasticsearch,nomoa/elasticsearch,umeshdangat/elasticsearch,pablocastro/elasticsearch,lmtwga/elasticsearch,overcome/elasticsearch,drewr/elasticsearch,likaiwalkman/elasticsearch,vroyer/elassandra,mjason3/elasticsearch,strapdata/elassandra-test,L
eoYao/elasticsearch,acchen97/elasticsearch,Shekharrajak/elasticsearch,Chhunlong/elasticsearch,scorpionvicky/elasticsearch,infusionsoft/elasticsearch,kenshin233/elasticsearch,alexshadow007/elasticsearch,mrorii/elasticsearch,StefanGor/elasticsearch,overcome/elasticsearch,sc0ttkclark/elasticsearch,rmuir/elasticsearch,uschindler/elasticsearch,kingaj/elasticsearch,mgalushka/elasticsearch,linglaiyao1314/elasticsearch,nrkkalyan/elasticsearch,kalimatas/elasticsearch,caengcjd/elasticsearch,snikch/elasticsearch,phani546/elasticsearch,wbowling/elasticsearch,huanzhong/elasticsearch,kaneshin/elasticsearch,skearns64/elasticsearch,MjAbuz/elasticsearch,F0lha/elasticsearch,ckclark/elasticsearch,Chhunlong/elasticsearch,iamjakob/elasticsearch,beiske/elasticsearch,PhaedrusTheGreek/elasticsearch,weipinghe/elasticsearch,truemped/elasticsearch,yanjunh/elasticsearch,mnylen/elasticsearch,girirajsharma/elasticsearch,abibell/elasticsearch,ricardocerq/elasticsearch,hanswang/elasticsearch,mute/elasticsearch,sc0ttkclark/elasticsearch,lzo/elasticsearch-1,TonyChai24/ESSource,JSCooke/elasticsearch,nilabhsagar/elasticsearch,iacdingping/elasticsearch,NBSW/elasticsearch,huypx1292/elasticsearch,javachengwc/elasticsearch,gingerwizard/elasticsearch,ESamir/elasticsearch,huanzhong/elasticsearch,avikurapati/elasticsearch,smflorentino/elasticsearch,AndreKR/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,lightslife/elasticsearch,nilabhsagar/elasticsearch,zeroctu/elasticsearch,ZTE-PaaS/elasticsearch,mcku/elasticsearch,jpountz/elasticsearch,alexshadow007/elasticsearch,GlenRSmith/elasticsearch,umeshdangat/elasticsearch,andrestc/elasticsearch,mapr/elasticsearch,nomoa/elasticsearch,szroland/elasticsearch,likaiwalkman/elasticsearch,fforbeck/elasticsearch,yynil/elasticsearch,abibell/elasticsearch,jaynblue/elasticsearch,sreeramjayan/elasticsearch,18098924759/elasticsearch,pritishppai/elasticsearch,Shekharrajak/elasticsearch,yuy168/elasticsearch,jango2015/elasticsearch,cwurm/elasticsearch,ivansun1010/elasticsearch,
btiernay/elasticsearch,robin13/elasticsearch,kunallimaye/elasticsearch,jw0201/elastic,masaruh/elasticsearch,dataduke/elasticsearch,smflorentino/elasticsearch,AshishThakur/elasticsearch,adrianbk/elasticsearch,fekaputra/elasticsearch,kalburgimanjunath/elasticsearch,drewr/elasticsearch,mbrukman/elasticsearch,tahaemin/elasticsearch,alexbrasetvik/elasticsearch,YosuaMichael/elasticsearch,tahaemin/elasticsearch,Charlesdong/elasticsearch,jaynblue/elasticsearch,fekaputra/elasticsearch,aglne/elasticsearch,s1monw/elasticsearch,amit-shar/elasticsearch,nomoa/elasticsearch,nilabhsagar/elasticsearch,sposam/elasticsearch,wittyameta/elasticsearch,robin13/elasticsearch,davidvgalbraith/elasticsearch,milodky/elasticsearch,lmtwga/elasticsearch,lydonchandra/elasticsearch,mjason3/elasticsearch,shreejay/elasticsearch,Brijeshrpatel9/elasticsearch,wayeast/elasticsearch,henakamaMSFT/elasticsearch,18098924759/elasticsearch,Shekharrajak/elasticsearch,Chhunlong/elasticsearch,mjhennig/elasticsearch,mrorii/elasticsearch,YosuaMichael/elasticsearch,Kakakakakku/elasticsearch,mohit/elasticsearch,shreejay/elasticsearch,liweinan0423/elasticsearch,JackyMai/elasticsearch,zkidkid/elasticsearch,queirozfcom/elasticsearch,NBSW/elasticsearch,zeroctu/elasticsearch,luiseduardohdbackup/elasticsearch,tebriel/elasticsearch,JervyShi/elasticsearch,ESamir/elasticsearch,TonyChai24/ESSource,golubev/elasticsearch,camilojd/elasticsearch,PhaedrusTheGreek/elasticsearch,linglaiyao1314/elasticsearch,gfyoung/elasticsearch,sarwarbhuiyan/elasticsearch,zkidkid/elasticsearch,glefloch/elasticsearch,masaruh/elasticsearch,mm0/elasticsearch,uschindler/elasticsearch,uschindler/elasticsearch,queirozfcom/elasticsearch,ThalaivaStars/OrgRepo1,kcompher/elasticsearch,weipinghe/elasticsearch,i-am-Nathan/elasticsearch,ImpressTV/elasticsearch,nazarewk/elasticsearch,yynil/elasticsearch,markwalkom/elasticsearch,truemped/elasticsearch,ivansun1010/elasticsearch,kubum/elasticsearch,yynil/elasticsearch,huypx1292/elasticsearch,ouyangkongtong/elasticse
arch,wbowling/elasticsearch,rento19962/elasticsearch,iacdingping/elasticsearch,kubum/elasticsearch,mmaracic/elasticsearch,alexshadow007/elasticsearch,brandonkearby/elasticsearch,xingguang2013/elasticsearch,mute/elasticsearch,18098924759/elasticsearch,jpountz/elasticsearch,djschny/elasticsearch,lchennup/elasticsearch,martinstuga/elasticsearch,zhiqinghuang/elasticsearch,sneivandt/elasticsearch,iamjakob/elasticsearch,nknize/elasticsearch,iacdingping/elasticsearch,zeroctu/elasticsearch,kalimatas/elasticsearch,MisterAndersen/elasticsearch,mcku/elasticsearch,queirozfcom/elasticsearch,brandonkearby/elasticsearch,IanvsPoplicola/elasticsearch,strapdata/elassandra-test,hafkensite/elasticsearch,scottsom/elasticsearch,umeshdangat/elasticsearch,jimczi/elasticsearch,camilojd/elasticsearch,rajanm/elasticsearch,lightslife/elasticsearch,mapr/elasticsearch,wittyameta/elasticsearch,alexbrasetvik/elasticsearch,nilabhsagar/elasticsearch,NBSW/elasticsearch,palecur/elasticsearch,strapdata/elassandra-test,wayeast/elasticsearch,jimczi/elasticsearch,kevinkluge/elasticsearch,elasticdog/elasticsearch,nrkkalyan/elasticsearch,strapdata/elassandra5-rc,sauravmondallive/elasticsearch,easonC/elasticsearch,ESamir/elasticsearch,pranavraman/elasticsearch,andrejserafim/elasticsearch,AshishThakur/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,koxa29/elasticsearch,vingupta3/elasticsearch,ckclark/elasticsearch,pritishppai/elasticsearch,btiernay/elasticsearch,LewayneNaidoo/elasticsearch,socialrank/elasticsearch,hirdesh2008/elasticsearch,sdauletau/elasticsearch,nezirus/elasticsearch,awislowski/elasticsearch,lks21c/elasticsearch,EasonYi/elasticsearch,PhaedrusTheGreek/elasticsearch,Flipkart/elasticsearch,dataduke/elasticsearch,yongminxia/elasticsearch,fred84/elasticsearch,winstonewert/elasticsearch,tahaemin/elasticsearch,artnowo/elasticsearch,fred84/elasticsearch,nellicus/elasticsearch,gingerwizard/elasticsearch,diendt/elasticsearch,MetSystem/elasticsearch,andrejserafim/elasticsearch,TonyChai24/ESSource,ma
saruh/elasticsearch,girirajsharma/elasticsearch,MichaelLiZhou/elasticsearch,slavau/elasticsearch,ckclark/elasticsearch,GlenRSmith/elasticsearch,SergVro/elasticsearch,18098924759/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,diendt/elasticsearch,Flipkart/elasticsearch,zhiqinghuang/elasticsearch,nezirus/elasticsearch,mjason3/elasticsearch,snikch/elasticsearch,weipinghe/elasticsearch,rmuir/elasticsearch,fooljohnny/elasticsearch,jprante/elasticsearch,MjAbuz/elasticsearch,overcome/elasticsearch,vietlq/elasticsearch,likaiwalkman/elasticsearch,ivansun1010/elasticsearch,drewr/elasticsearch,huanzhong/elasticsearch,markllama/elasticsearch,beiske/elasticsearch,yongminxia/elasticsearch,a2lin/elasticsearch,uschindler/elasticsearch,abibell/elasticsearch,episerver/elasticsearch,njlawton/elasticsearch,pablocastro/elasticsearch,C-Bish/elasticsearch,maddin2016/elasticsearch,rhoml/elasticsearch,pranavraman/elasticsearch,liweinan0423/elasticsearch,kcompher/elasticsearch,TonyChai24/ESSource,rlugojr/elasticsearch,JervyShi/elasticsearch,karthikjaps/elasticsearch,lchennup/elasticsearch,mjhennig/elasticsearch,liweinan0423/elasticsearch,amit-shar/elasticsearch,golubev/elasticsearch,rlugojr/elasticsearch,NBSW/elasticsearch,IanvsPoplicola/elasticsearch,martinstuga/elasticsearch,NBSW/elasticsearch,JackyMai/elasticsearch,masaruh/elasticsearch,Liziyao/elasticsearch,nknize/elasticsearch,iantruslove/elasticsearch,Widen/elasticsearch,huypx1292/elasticsearch,markwalkom/elasticsearch,achow/elasticsearch,mnylen/elasticsearch,hirdesh2008/elasticsearch,golubev/elasticsearch,jpountz/elasticsearch,avikurapati/elasticsearch,javachengwc/elasticsearch,Kakakakakku/elasticsearch,ulkas/elasticsearch,vingupta3/elasticsearch,hechunwen/elasticsearch,jprante/elasticsearch,Uiho/elasticsearch,wenpos/elasticsearch,wayeast/elasticsearch,MichaelLiZhou/elasticsearch,huypx1292/elasticsearch,nrkkalyan/elasticsearch,milodky/elasticsearch,nilabhsagar/elasticsearch,mbrukman/elasticsearch,mcku/elasticsearch,cnfire/elastics
earch-1,tebriel/elasticsearch,luiseduardohdbackup/elasticsearch,nellicus/elasticsearch,Widen/elasticsearch,sposam/elasticsearch,elancom/elasticsearch,diendt/elasticsearch,khiraiwa/elasticsearch,jaynblue/elasticsearch,SergVro/elasticsearch,ulkas/elasticsearch,sauravmondallive/elasticsearch,knight1128/elasticsearch,himanshuag/elasticsearch,Shekharrajak/elasticsearch,jchampion/elasticsearch,mmaracic/elasticsearch,yongminxia/elasticsearch,tsohil/elasticsearch,luiseduardohdbackup/elasticsearch,jeteve/elasticsearch,Stacey-Gammon/elasticsearch,ImpressTV/elasticsearch,StefanGor/elasticsearch,rmuir/elasticsearch,Ansh90/elasticsearch,jbertouch/elasticsearch,mgalushka/elasticsearch,lzo/elasticsearch-1,ThiagoGarciaAlves/elasticsearch,pablocastro/elasticsearch,C-Bish/elasticsearch,wuranbo/elasticsearch,vroyer/elassandra,elasticdog/elasticsearch,wangyuxue/elasticsearch,nezirus/elasticsearch,jw0201/elastic,nrkkalyan/elasticsearch,tkssharma/elasticsearch,socialrank/elasticsearch,Brijeshrpatel9/elasticsearch,MichaelLiZhou/elasticsearch,mcku/elasticsearch,HarishAtGitHub/elasticsearch,andrejserafim/elasticsearch,ThiagoGarciaAlves/elasticsearch,dongjoon-hyun/elasticsearch,truemped/elasticsearch,TonyChai24/ESSource,Stacey-Gammon/elasticsearch,Fsero/elasticsearch,btiernay/elasticsearch,C-Bish/elasticsearch,i-am-Nathan/elasticsearch,gfyoung/elasticsearch,vietlq/elasticsearch,obourgain/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,phani546/elasticsearch,ImpressTV/elasticsearch,sarwarbhuiyan/elasticsearch,JervyShi/elasticsearch,karthikjaps/elasticsearch,s1monw/elasticsearch,phani546/elasticsearch,djschny/elasticsearch,snikch/elasticsearch,Siddartha07/elasticsearch,strapdata/elassandra-test,geidies/elasticsearch,Fsero/elasticsearch,kaneshin/elasticsearch,mgalushka/elasticsearch,StefanGor/elasticsearch,kenshin233/elasticsearch,yanjunh/elasticsearch,lightslife/elasticsearch,jeteve/elasticsearch,uschindler/elasticsearch,truemped/elasticsearch,nellicus/elasticsearch,MisterAndersen/elasticse
arch,MisterAndersen/elasticsearch,rento19962/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,mbrukman/elasticsearch,Kakakakakku/elasticsearch,SergVro/elasticsearch,snikch/elasticsearch,martinstuga/elasticsearch,ESamir/elasticsearch,strapdata/elassandra-test,markllama/elasticsearch,HonzaKral/elasticsearch,bestwpw/elasticsearch,Charlesdong/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,areek/elasticsearch,Rygbee/elasticsearch,xingguang2013/elasticsearch,kaneshin/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,koxa29/elasticsearch,pablocastro/elasticsearch,tebriel/elasticsearch,alexbrasetvik/elasticsearch,zeroctu/elasticsearch,geidies/elasticsearch,fred84/elasticsearch,beiske/elasticsearch,myelin/elasticsearch,strapdata/elassandra-test,gingerwizard/elasticsearch,slavau/elasticsearch,humandb/elasticsearch,dataduke/elasticsearch,martinstuga/elasticsearch,kunallimaye/elasticsearch,Siddartha07/elasticsearch,IanvsPoplicola/elasticsearch,btiernay/elasticsearch,JSCooke/elasticsearch,jchampion/elasticsearch,wittyameta/elasticsearch,mapr/elasticsearch,HonzaKral/elasticsearch,chirilo/elasticsearch,yuy168/elasticsearch,pablocastro/elasticsearch,Ansh90/elasticsearch,ricardocerq/elasticsearch,snikch/elasticsearch,Kakakakakku/elasticsearch,JSCooke/elasticsearch,mbrukman/elasticsearch,a2lin/elasticsearch,strapdata/elassandra,HonzaKral/elasticsearch,kimimj/elasticsearch,dataduke/elasticsearch,nellicus/elasticsearch,diendt/elasticsearch,wimvds/elasticsearch,ouyangkongtong/elasticsearch,sc0ttkclark/elasticsearch,onegambler/elasticsearch,ulkas/elasticsearch,skearns64/elasticsearch,ouyangkongtong/elasticsearch,infusionsoft/elasticsearch,yuy168/elasticsearch,ricardocerq/elasticsearch,areek/elasticsearch,martinstuga/elasticsearch,nellicus/elasticsearch,luiseduardohdbackup/elasticsearch,easonC/elasticsearch,clintongormley/elasticsearch,vingupta3/elasticsearch,scorpionvicky/elasticsearch,iantruslove/elasticsearch,nknize/elasticsearch,Rygbee/elasticsearch,slavau/elasticsearch,khiraiw
a/elasticsearch,EasonYi/elasticsearch,AndreKR/elasticsearch,kimimj/elasticsearch,maddin2016/elasticsearch,hafkensite/elasticsearch,rajanm/elasticsearch,caengcjd/elasticsearch,F0lha/elasticsearch,Widen/elasticsearch,winstonewert/elasticsearch,jimhooker2002/elasticsearch,Uiho/elasticsearch,jimhooker2002/elasticsearch,weipinghe/elasticsearch,mcku/elasticsearch,coding0011/elasticsearch,scottsom/elasticsearch,drewr/elasticsearch,ivansun1010/elasticsearch,chirilo/elasticsearch,jimhooker2002/elasticsearch,nknize/elasticsearch,areek/elasticsearch,socialrank/elasticsearch,qwerty4030/elasticsearch,djschny/elasticsearch,kingaj/elasticsearch,hydro2k/elasticsearch,wuranbo/elasticsearch,Siddartha07/elasticsearch,sc0ttkclark/elasticsearch,himanshuag/elasticsearch,fernandozhu/elasticsearch,tkssharma/elasticsearch,hanswang/elasticsearch,xuzha/elasticsearch,dongjoon-hyun/elasticsearch,markllama/elasticsearch,ulkas/elasticsearch,yongminxia/elasticsearch,humandb/elasticsearch,dongjoon-hyun/elasticsearch,Flipkart/elasticsearch,amaliujia/elasticsearch,sauravmondallive/elasticsearch,lmtwga/elasticsearch,ZTE-PaaS/elasticsearch,kingaj/elasticsearch,truemped/elasticsearch,onegambler/elasticsearch,nellicus/elasticsearch,yanjunh/elasticsearch,jbertouch/elasticsearch,onegambler/elasticsearch,dylan8902/elasticsearch,sneivandt/elasticsearch,andrestc/elasticsearch,mnylen/elasticsearch,springning/elasticsearch,MetSystem/elasticsearch,LeoYao/elasticsearch,ESamir/elasticsearch,Rygbee/elasticsearch,ckclark/elasticsearch,clintongormley/elasticsearch,kubum/elasticsearch,xingguang2013/elasticsearch,milodky/elasticsearch,shreejay/elasticsearch,sneivandt/elasticsearch,karthikjaps/elasticsearch,naveenhooda2000/elasticsearch,jw0201/elastic,njlawton/elasticsearch,lydonchandra/elasticsearch,markwalkom/elasticsearch,gingerwizard/elasticsearch,wayeast/elasticsearch,iamjakob/elasticsearch,jbertouch/elasticsearch,spiegela/elasticsearch,franklanganke/elasticsearch,xuzha/elasticsearch,vvcephei/elasticsearch,mikemcca
nd/elasticsearch,knight1128/elasticsearch,rmuir/elasticsearch,rmuir/elasticsearch,apepper/elasticsearch,yongminxia/elasticsearch,Shekharrajak/elasticsearch,AshishThakur/elasticsearch,dylan8902/elasticsearch,huanzhong/elasticsearch,overcome/elasticsearch,lightslife/elasticsearch,nomoa/elasticsearch,Charlesdong/elasticsearch,iacdingping/elasticsearch,kaneshin/elasticsearch,alexkuk/elasticsearch,Collaborne/elasticsearch,likaiwalkman/elasticsearch,jsgao0/elasticsearch,Kakakakakku/elasticsearch,gmarz/elasticsearch,xingguang2013/elasticsearch,glefloch/elasticsearch,myelin/elasticsearch,vingupta3/elasticsearch,rajanm/elasticsearch,mmaracic/elasticsearch,camilojd/elasticsearch,markllama/elasticsearch,nrkkalyan/elasticsearch,wayeast/elasticsearch,humandb/elasticsearch,lydonchandra/elasticsearch,ThiagoGarciaAlves/elasticsearch,knight1128/elasticsearch,fooljohnny/elasticsearch,adrianbk/elasticsearch,hafkensite/elasticsearch,easonC/elasticsearch,i-am-Nathan/elasticsearch,adrianbk/elasticsearch,ydsakyclguozi/elasticsearch,Fsero/elasticsearch,petabytedata/elasticsearch,EasonYi/elasticsearch,mmaracic/elasticsearch,kimimj/elasticsearch,davidvgalbraith/elasticsearch,strapdata/elassandra,Rygbee/elasticsearch,bawse/elasticsearch,mgalushka/elasticsearch,Liziyao/elasticsearch,areek/elasticsearch,Stacey-Gammon/elasticsearch,Charlesdong/elasticsearch,umeshdangat/elasticsearch,jimczi/elasticsearch,areek/elasticsearch,caengcjd/elasticsearch,LeoYao/elasticsearch,Shepard1212/elasticsearch,nazarewk/elasticsearch,koxa29/elasticsearch,strapdata/elassandra5-rc,SergVro/elasticsearch,masterweb121/elasticsearch,trangvh/elasticsearch,fforbeck/elasticsearch,gmarz/elasticsearch,brandonkearby/elasticsearch,loconsolutions/elasticsearch,linglaiyao1314/elasticsearch,linglaiyao1314/elasticsearch,mapr/elasticsearch,coding0011/elasticsearch,sc0ttkclark/elasticsearch,jchampion/elasticsearch,elancom/elasticsearch,xingguang2013/elasticsearch,YosuaMichael/elasticsearch,alexbrasetvik/elasticsearch,kalimatas/elasti
csearch,scottsom/elasticsearch,phani546/elasticsearch,xingguang2013/elasticsearch,JervyShi/elasticsearch,overcome/elasticsearch,jsgao0/elasticsearch,hydro2k/elasticsearch,kcompher/elasticsearch,girirajsharma/elasticsearch,loconsolutions/elasticsearch,yynil/elasticsearch,hechunwen/elasticsearch,coding0011/elasticsearch,snikch/elasticsearch,kubum/elasticsearch,knight1128/elasticsearch,yynil/elasticsearch,humandb/elasticsearch,fekaputra/elasticsearch,HonzaKral/elasticsearch,Collaborne/elasticsearch,winstonewert/elasticsearch,golubev/elasticsearch,alexbrasetvik/elasticsearch,MetSystem/elasticsearch,elancom/elasticsearch,slavau/elasticsearch,achow/elasticsearch,dongjoon-hyun/elasticsearch,iamjakob/elasticsearch,vroyer/elasticassandra,ZTE-PaaS/elasticsearch,polyfractal/elasticsearch,ouyangkongtong/elasticsearch,Widen/elasticsearch,xpandan/elasticsearch,ThalaivaStars/OrgRepo1,jw0201/elastic,kevinkluge/elasticsearch,springning/elasticsearch,dpursehouse/elasticsearch,wimvds/elasticsearch,szroland/elasticsearch,nezirus/elasticsearch,amit-shar/elasticsearch,brandonkearby/elasticsearch,ouyangkongtong/elasticsearch,jpountz/elasticsearch,palecur/elasticsearch,mbrukman/elasticsearch,brandonkearby/elasticsearch,mrorii/elasticsearch,wenpos/elasticsearch,dpursehouse/elasticsearch,wangtuo/elasticsearch,tahaemin/elasticsearch,markllama/elasticsearch,aglne/elasticsearch,szroland/elasticsearch,jbertouch/elasticsearch,pozhidaevak/elasticsearch,linglaiyao1314/elasticsearch,obourgain/elasticsearch,kimimj/elasticsearch,Charlesdong/elasticsearch,beiske/elasticsearch,hirdesh2008/elasticsearch,kcompher/elasticsearch,rhoml/elasticsearch,thecocce/elasticsearch,winstonewert/elasticsearch,amit-shar/elasticsearch,kingaj/elasticsearch,geidies/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,polyfractal/elasticsearch,springning/elasticsearch,amit-shar/elasticsearch,wuranbo/elasticsearch,khiraiwa/elasticsearch,episerver/elasticsearch,jeteve/elasticsearch,AshishThakur/elasticsearch,Ansh90/elasticsearc
h,clintongormley/elasticsearch,polyfractal/elasticsearch,markllama/elasticsearch,jango2015/elasticsearch,KimTaehee/elasticsearch,himanshuag/elasticsearch,mgalushka/elasticsearch,wimvds/elasticsearch,petabytedata/elasticsearch,lmtwga/elasticsearch,sposam/elasticsearch,alexkuk/elasticsearch,jimczi/elasticsearch,chirilo/elasticsearch,Uiho/elasticsearch,polyfractal/elasticsearch,nknize/elasticsearch,LeoYao/elasticsearch,abibell/elasticsearch,skearns64/elasticsearch,kingaj/elasticsearch,petabytedata/elasticsearch,djschny/elasticsearch,dataduke/elasticsearch,javachengwc/elasticsearch,rajanm/elasticsearch,fekaputra/elasticsearch,maddin2016/elasticsearch,vietlq/elasticsearch,Siddartha07/elasticsearch,elasticdog/elasticsearch,girirajsharma/elasticsearch,lks21c/elasticsearch,adrianbk/elasticsearch,F0lha/elasticsearch,LewayneNaidoo/elasticsearch,vroyer/elasticassandra,Charlesdong/elasticsearch,robin13/elasticsearch,JackyMai/elasticsearch,myelin/elasticsearch,markharwood/elasticsearch,dylan8902/elasticsearch,jango2015/elasticsearch,lzo/elasticsearch-1,mrorii/elasticsearch,amaliujia/elasticsearch,mm0/elasticsearch,rajanm/elasticsearch,sreeramjayan/elasticsearch,zkidkid/elasticsearch,hydro2k/elasticsearch,fred84/elasticsearch,zhiqinghuang/elasticsearch,nazarewk/elasticsearch,SergVro/elasticsearch,sreeramjayan/elasticsearch,ouyangkongtong/elasticsearch,Liziyao/elasticsearch,cwurm/elasticsearch,kalburgimanjunath/elasticsearch,strapdata/elassandra5-rc,ydsakyclguozi/elasticsearch,naveenhooda2000/elasticsearch,lydonchandra/elasticsearch,acchen97/elasticsearch,lchennup/elasticsearch,winstonewert/elasticsearch,alexkuk/elasticsearch,mjason3/elasticsearch,iantruslove/elasticsearch,fooljohnny/elasticsearch,martinstuga/elasticsearch,lzo/elasticsearch-1,s1monw/elasticsearch,KimTaehee/elasticsearch,franklanganke/elasticsearch,sauravmondallive/elasticsearch,umeshdangat/elasticsearch,fernandozhu/elasticsearch,xuzha/elasticsearch,kunallimaye/elasticsearch,queirozfcom/elasticsearch,diendt/elastic
search,geidies/elasticsearch,jaynblue/elasticsearch,pranavraman/elasticsearch,dpursehouse/elasticsearch,glefloch/elasticsearch,jimhooker2002/elasticsearch,sdauletau/elasticsearch,artnowo/elasticsearch,kalimatas/elasticsearch,mortonsykes/elasticsearch,Charlesdong/elasticsearch,robin13/elasticsearch,sposam/elasticsearch,lchennup/elasticsearch,kenshin233/elasticsearch,mbrukman/elasticsearch,karthikjaps/elasticsearch,kimimj/elasticsearch,Collaborne/elasticsearch,caengcjd/elasticsearch,artnowo/elasticsearch,Shepard1212/elasticsearch,vietlq/elasticsearch,tsohil/elasticsearch,qwerty4030/elasticsearch,mikemccand/elasticsearch,easonC/elasticsearch,vroyer/elassandra,mohit/elasticsearch,bestwpw/elasticsearch,btiernay/elasticsearch,18098924759/elasticsearch,MisterAndersen/elasticsearch,mortonsykes/elasticsearch,wbowling/elasticsearch,bestwpw/elasticsearch,gfyoung/elasticsearch,iantruslove/elasticsearch,abibell/elasticsearch,a2lin/elasticsearch,amaliujia/elasticsearch,apepper/elasticsearch,gmarz/elasticsearch,vvcephei/elasticsearch,fekaputra/elasticsearch,loconsolutions/elasticsearch,achow/elasticsearch,achow/elasticsearch,hechunwen/elasticsearch,Ansh90/elasticsearch,wittyameta/elasticsearch,elasticdog/elasticsearch,Shepard1212/elasticsearch,ydsakyclguozi/elasticsearch,koxa29/elasticsearch,pranavraman/elasticsearch,zkidkid/elasticsearch,pritishppai/elasticsearch,wittyameta/elasticsearch,beiske/elasticsearch,tkssharma/elasticsearch,zhiqinghuang/elasticsearch,Chhunlong/elasticsearch,MaineC/elasticsearch,bestwpw/elasticsearch,mohit/elasticsearch,ckclark/elasticsearch,KimTaehee/elasticsearch,wangtuo/elasticsearch,IanvsPoplicola/elasticsearch,strapdata/elassandra,onegambler/elasticsearch,scottsom/elasticsearch,kalburgimanjunath/elasticsearch,clintongormley/elasticsearch,camilojd/elasticsearch,ThalaivaStars/OrgRepo1,hechunwen/elasticsearch,sdauletau/elasticsearch,kingaj/elasticsearch,jprante/elasticsearch,masterweb121/elasticsearch,koxa29/elasticsearch,fernandozhu/elasticsearch,humand
b/elasticsearch,jimhooker2002/elasticsearch,Ansh90/elasticsearch,masterweb121/elasticsearch,rhoml/elasticsearch,jaynblue/elasticsearch,StefanGor/elasticsearch,dpursehouse/elasticsearch,apepper/elasticsearch,henakamaMSFT/elasticsearch,sreeramjayan/elasticsearch,Helen-Zhao/elasticsearch,masaruh/elasticsearch,jeteve/elasticsearch,bestwpw/elasticsearch,awislowski/elasticsearch,vvcephei/elasticsearch,jango2015/elasticsearch,ulkas/elasticsearch,PhaedrusTheGreek/elasticsearch,areek/elasticsearch,scorpionvicky/elasticsearch,MichaelLiZhou/elasticsearch,wittyameta/elasticsearch,MetSystem/elasticsearch,tahaemin/elasticsearch,ThalaivaStars/OrgRepo1,himanshuag/elasticsearch,khiraiwa/elasticsearch,apepper/elasticsearch,myelin/elasticsearch,davidvgalbraith/elasticsearch,nomoa/elasticsearch,socialrank/elasticsearch,petabytedata/elasticsearch,mjhennig/elasticsearch,javachengwc/elasticsearch,spiegela/elasticsearch,alexkuk/elasticsearch,hanswang/elasticsearch,glefloch/elasticsearch,mkis-/elasticsearch,mkis-/elasticsearch,Stacey-Gammon/elasticsearch,elancom/elasticsearch,xpandan/elasticsearch,socialrank/elasticsearch,vingupta3/elasticsearch,mnylen/elasticsearch,djschny/elasticsearch,palecur/elasticsearch,kevinkluge/elasticsearch,lydonchandra/elasticsearch,schonfeld/elasticsearch,jaynblue/elasticsearch,iantruslove/elasticsearch,maddin2016/elasticsearch,rhoml/elasticsearch,thecocce/elasticsearch,schonfeld/elasticsearch,henakamaMSFT/elasticsearch,jimczi/elasticsearch,gfyoung/elasticsearch,rhoml/elasticsearch,hafkensite/elasticsearch,jsgao0/elasticsearch,pranavraman/elasticsearch,mcku/elasticsearch,springning/elasticsearch,Widen/elasticsearch,abibell/elasticsearch,cnfire/elasticsearch-1,onegambler/elasticsearch,wangyuxue/elasticsearch,sdauletau/elasticsearch,acchen97/elasticsearch,EasonYi/elasticsearch,scorpionvicky/elasticsearch,masterweb121/elasticsearch,EasonYi/elasticsearch,ImpressTV/elasticsearch,socialrank/elasticsearch,ThiagoGarciaAlves/elasticsearch,slavau/elasticsearch,lzo/elastic
search-1,kevinkluge/elasticsearch,cwurm/elasticsearch,jsgao0/elasticsearch,dylan8902/elasticsearch,hydro2k/elasticsearch,ESamir/elasticsearch,hirdesh2008/elasticsearch,sarwarbhuiyan/elasticsearch,weipinghe/elasticsearch,kunallimaye/elasticsearch,mrorii/elasticsearch,abibell/elasticsearch,obourgain/elasticsearch,phani546/elasticsearch,caengcjd/elasticsearch,davidvgalbraith/elasticsearch,MetSystem/elasticsearch,qwerty4030/elasticsearch,himanshuag/elasticsearch,dataduke/elasticsearch,mrorii/elasticsearch,lmtwga/elasticsearch,geidies/elasticsearch,MjAbuz/elasticsearch,Flipkart/elasticsearch,trangvh/elasticsearch,PhaedrusTheGreek/elasticsearch,cwurm/elasticsearch,thecocce/elasticsearch,mgalushka/elasticsearch,StefanGor/elasticsearch,kimimj/elasticsearch,ydsakyclguozi/elasticsearch,lchennup/elasticsearch,milodky/elasticsearch,F0lha/elasticsearch,adrianbk/elasticsearch,awislowski/elasticsearch,hydro2k/elasticsearch,JackyMai/elasticsearch,Uiho/elasticsearch,karthikjaps/elasticsearch,kunallimaye/elasticsearch,jsgao0/elasticsearch,AndreKR/elasticsearch,apepper/elasticsearch,lightslife/elasticsearch,sarwarbhuiyan/elasticsearch,avikurapati/elasticsearch,elasticdog/elasticsearch,Uiho/elasticsearch,andrestc/elasticsearch,artnowo/elasticsearch,lightslife/elasticsearch,MjAbuz/elasticsearch,sneivandt/elasticsearch,pablocastro/elasticsearch,18098924759/elasticsearch,zhiqinghuang/elasticsearch,karthikjaps/elasticsearch,jimhooker2002/elasticsearch,mortonsykes/elasticsearch,ckclark/elasticsearch,xpandan/elasticsearch,polyfractal/elasticsearch,overcome/elasticsearch,tahaemin/elasticsearch,Collaborne/elasticsearch,rento19962/elasticsearch,Collaborne/elasticsearch,knight1128/elasticsearch,alexbrasetvik/elasticsearch,petabytedata/elasticsearch,spiegela/elasticsearch,andrestc/elasticsearch,a2lin/elasticsearch,markharwood/elasticsearch,lks21c/elasticsearch,smflorentino/elasticsearch,fekaputra/elasticsearch,Rygbee/elasticsearch,hafkensite/elasticsearch,LewayneNaidoo/elasticsearch,djschny/elast
icsearch,onegambler/elasticsearch,avikurapati/elasticsearch,mikemccand/elasticsearch,rlugojr/elasticsearch,tkssharma/elasticsearch,Collaborne/elasticsearch,slavau/elasticsearch,mortonsykes/elasticsearch,Rygbee/elasticsearch,jw0201/elastic,javachengwc/elasticsearch,drewr/elasticsearch,drewr/elasticsearch,jeteve/elasticsearch,xpandan/elasticsearch,yongminxia/elasticsearch,likaiwalkman/elasticsearch,PhaedrusTheGreek/elasticsearch,elancom/elasticsearch,markwalkom/elasticsearch,phani546/elasticsearch,girirajsharma/elasticsearch,markharwood/elasticsearch,weipinghe/elasticsearch,naveenhooda2000/elasticsearch,wimvds/elasticsearch,mjhennig/elasticsearch,episerver/elasticsearch,wittyameta/elasticsearch,bawse/elasticsearch,kevinkluge/elasticsearch,ImpressTV/elasticsearch,masterweb121/elasticsearch,awislowski/elasticsearch,Stacey-Gammon/elasticsearch,likaiwalkman/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,kaneshin/elasticsearch,s1monw/elasticsearch,huanzhong/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,franklanganke/elasticsearch,nazarewk/elasticsearch,polyfractal/elasticsearch,queirozfcom/elasticsearch,pranavraman/elasticsearch,mkis-/elasticsearch,rento19962/elasticsearch,wimvds/elasticsearch,drewr/elasticsearch,Uiho/elasticsearch,AndreKR/elasticsearch,awislowski/elasticsearch,LewayneNaidoo/elasticsearch,kaneshin/elasticsearch,IanvsPoplicola/elasticsearch,Uiho/elasticsearch,pritishppai/elasticsearch,milodky/elasticsearch,achow/elasticsearch,elancom/elasticsearch,wenpos/elasticsearch,luiseduardohdbackup/elasticsearch,ThalaivaStars/OrgRepo1,Siddartha07/elasticsearch,Brijeshrpatel9/elasticsearch,schonfeld/elasticsearch,lydonchandra/elasticsearch,tkssharma/elasticsearch,golubev/elasticsearch,mapr/elasticsearch,nrkkalyan/elasticsearch,cnfire/elasticsearch-1,mapr/elasticsearch,episerver/elasticsearch,AndreKR/elasticsearch,Widen/elasticsearch,AshishThakur/elasticsearch,jbertouch/elasticsearch,mkis-/elasticsearch,tsohil/elasticsearch,gfyoung/elasticsearch,kubum/elasticse
arch,kunallimaye/elasticsearch,Brijeshrpatel9/elasticsearch,clintongormley/elasticsearch,ulkas/elasticsearch,skearns64/elasticsearch,mm0/elasticsearch,cnfire/elasticsearch-1,Fsero/elasticsearch,humandb/elasticsearch,sc0ttkclark/elasticsearch,mute/elasticsearch,mm0/elasticsearch,mkis-/elasticsearch,pozhidaevak/elasticsearch,kcompher/elasticsearch,masterweb121/elasticsearch,sauravmondallive/elasticsearch,tkssharma/elasticsearch,wbowling/elasticsearch,ydsakyclguozi/elasticsearch,szroland/elasticsearch,Helen-Zhao/elasticsearch,artnowo/elasticsearch,tebriel/elasticsearch,LeoYao/elasticsearch,huanzhong/elasticsearch,ImpressTV/elasticsearch,qwerty4030/elasticsearch,chirilo/elasticsearch,Ansh90/elasticsearch,sneivandt/elasticsearch,vietlq/elasticsearch,huanzhong/elasticsearch,iamjakob/elasticsearch,loconsolutions/elasticsearch,sc0ttkclark/elasticsearch,achow/elasticsearch,rajanm/elasticsearch,camilojd/elasticsearch,strapdata/elassandra,yuy168/elasticsearch,xpandan/elasticsearch,wangtuo/elasticsearch,coding0011/elasticsearch,MjAbuz/elasticsearch,mikemccand/elasticsearch,maddin2016/elasticsearch,vietlq/elasticsearch,Widen/elasticsearch,kalburgimanjunath/elasticsearch,Chhunlong/elasticsearch,markharwood/elasticsearch,schonfeld/elasticsearch,Fsero/elasticsearch,markllama/elasticsearch,avikurapati/elasticsearch,trangvh/elasticsearch,iamjakob/elasticsearch,tsohil/elasticsearch,HarishAtGitHub/elasticsearch,tebriel/elasticsearch,gmarz/elasticsearch,Siddartha07/elasticsearch,ivansun1010/elasticsearch,KimTaehee/elasticsearch,glefloch/elasticsearch,zeroctu/elasticsearch,hanswang/elasticsearch,dataduke/elasticsearch,myelin/elasticsearch,coding0011/elasticsearch,thecocce/elasticsearch,MjAbuz/elasticsearch,camilojd/elasticsearch,ricardocerq/elasticsearch,wbowling/elasticsearch,GlenRSmith/elasticsearch,javachengwc/elasticsearch,mm0/elasticsearch,Siddartha07/elasticsearch,hanswang/elasticsearch,PhaedrusTheGreek/elasticsearch,infusionsoft/elasticsearch,thecocce/elasticsearch,girirajsharma/e
lasticsearch,Ansh90/elasticsearch,easonC/elasticsearch,wenpos/elasticsearch,ThiagoGarciaAlves/elasticsearch,MichaelLiZhou/elasticsearch,lchennup/elasticsearch,Chhunlong/elasticsearch,yuy168/elasticsearch,lzo/elasticsearch-1,himanshuag/elasticsearch,szroland/elasticsearch,kalburgimanjunath/elasticsearch,KimTaehee/elasticsearch,lmtwga/elasticsearch,vroyer/elasticassandra,Rygbee/elasticsearch,bawse/elasticsearch,cnfire/elasticsearch-1,C-Bish/elasticsearch,rlugojr/elasticsearch,MisterAndersen/elasticsearch,apepper/elasticsearch,strapdata/elassandra-test,acchen97/elasticsearch,aglne/elasticsearch,vvcephei/elasticsearch,ImpressTV/elasticsearch,MichaelLiZhou/elasticsearch,xuzha/elasticsearch,andrestc/elasticsearch,mmaracic/elasticsearch,mute/elasticsearch,masterweb121/elasticsearch,iacdingping/elasticsearch,jango2015/elasticsearch,vvcephei/elasticsearch,pritishppai/elasticsearch,kenshin233/elasticsearch,rento19962/elasticsearch,pablocastro/elasticsearch,palecur/elasticsearch,jchampion/elasticsearch,kcompher/elasticsearch,milodky/elasticsearch,strapdata/elassandra5-rc,mute/elasticsearch,sreeramjayan/elasticsearch,franklanganke/elasticsearch,kalimatas/elasticsearch,chirilo/elasticsearch,pozhidaevak/elasticsearch,C-Bish/elasticsearch,kevinkluge/elasticsearch,JervyShi/elasticsearch,sauravmondallive/elasticsearch,mjhennig/elasticsearch,beiske/elasticsearch,rento19962/elasticsearch,kubum/elasticsearch,xuzha/elasticsearch,hechunwen/elasticsearch,amaliujia/elasticsearch,sarwarbhuiyan/elasticsearch,fernandozhu/elasticsearch,sarwarbhuiyan/elasticsearch,yynil/elasticsearch,areek/elasticsearch,MichaelLiZhou/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,truemped/elasticsearch,lydonchandra/elasticsearch,pranavraman/elasticsearch,Flipkart/elasticsearch,skearns64/elasticsearch,chirilo/elasticsearch,LeoYao/elasticsearch,yongminxia/elasticsearch,xingguang2013/elasticsearch,ouyangkongtong/elasticsearch,gmarz/elasticsearch,AshishThakur/elasticsearch,dongjoon-hyun/elasticsearch,Helen-Zhao
/elasticsearch
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.suggest; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableList; import com.google.common.io.Resources; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.*; import org.elasticsearch.action.suggest.SuggestRequestBuilder; import org.elasticsearch.action.suggest.SuggestResponse; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.DirectCandidateGenerator; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.junit.Test; import java.io.IOException; import java.util.*; import java.util.concurrent.ExecutionException; import static 
org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; import static org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.candidateGenerator; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.hamcrest.Matchers.*; /** * Integration tests for term and phrase suggestions. Many of these tests many requests that vary only slightly from one another. Where * possible these tests should declare for the first request, make the request, modify the configuration for the next request, make that * request, modify again, request again, etc. This makes it very obvious what changes between requests. */ public class SuggestSearchTests extends ElasticsearchIntegrationTest { @Test // see #3196 public void testSuggestAcrossMultipleIndices() throws IOException { createIndex("test"); ensureGreen(); index("test", "type1", "1", "text", "abcd"); index("test", "type1", "2", "text", "aacd"); index("test", "type1", "3", "text", "abbd"); index("test", "type1", "4", "text", "abcc"); refresh(); TermSuggestionBuilder termSuggest = termSuggestion("test") .suggestMode("always") // Always, otherwise the results can vary between requests. 
.text("abcd") .field("text"); logger.info("--> run suggestions with one index"); searchSuggest( termSuggest); createIndex("test_1"); ensureGreen(); index("test_1", "type1", "1", "text", "ab cd"); index("test_1", "type1", "2", "text", "aa cd"); index("test_1", "type1", "3", "text", "ab bd"); index("test_1", "type1", "4", "text", "ab cc"); refresh(); termSuggest = termSuggestion("test") .suggestMode("always") // Always, otherwise the results can vary between requests. .text("ab cd") .minWordLength(1) .field("text"); logger.info("--> run suggestions with two indices"); searchSuggest( termSuggest); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("text").field("type", "string").field("analyzer", "keyword").endObject() .endObject() .endObject().endObject(); assertAcked(prepareCreate("test_2").addMapping("type1", mapping)); ensureGreen(); index("test_2", "type1", "1", "text", "ab cd"); index("test_2", "type1", "2", "text", "aa cd"); index("test_2", "type1", "3", "text", "ab bd"); index("test_2", "type1", "4", "text", "ab cc"); index("test_2", "type1", "1", "text", "abcd"); index("test_2", "type1", "2", "text", "aacd"); index("test_2", "type1", "3", "text", "abbd"); index("test_2", "type1", "4", "text", "abcc"); refresh(); termSuggest = termSuggestion("test") .suggestMode("always") // Always, otherwise the results can vary between requests. 
.text("ab cd") .minWordLength(1) .field("text"); logger.info("--> run suggestions with three indices"); try { searchSuggest( termSuggest); fail(" can not suggest across multiple indices with different analysis chains"); } catch (ReduceSearchPhaseException ex) { assertThat(ex.getCause(), instanceOf(IllegalStateException.class)); assertThat(ex.getCause().getMessage(), anyOf(endsWith("Suggest entries have different sizes actual [1] expected [2]"), endsWith("Suggest entries have different sizes actual [2] expected [1]"))); } catch (IllegalStateException ex) { assertThat(ex.getMessage(), anyOf(endsWith("Suggest entries have different sizes actual [1] expected [2]"), endsWith("Suggest entries have different sizes actual [2] expected [1]"))); } termSuggest = termSuggestion("test") .suggestMode("always") // Always, otherwise the results can vary between requests. .text("ABCD") .minWordLength(1) .field("text"); logger.info("--> run suggestions with four indices"); try { searchSuggest( termSuggest); fail(" can not suggest across multiple indices with different analysis chains"); } catch (ReduceSearchPhaseException ex) { assertThat(ex.getCause(), instanceOf(IllegalStateException.class)); assertThat(ex.getCause().getMessage(), anyOf(endsWith("Suggest entries have different text actual [ABCD] expected [abcd]"), endsWith("Suggest entries have different text actual [abcd] expected [ABCD]"))); } catch (IllegalStateException ex) { assertThat(ex.getMessage(), anyOf(endsWith("Suggest entries have different text actual [ABCD] expected [abcd]"), endsWith("Suggest entries have different text actual [abcd] expected [ABCD]"))); } } @Test // see #3037 public void testSuggestModes() throws IOException { CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder() .put(SETTING_NUMBER_OF_SHARDS, 1) .put(SETTING_NUMBER_OF_REPLICAS, 0) .put("index.analysis.analyzer.biword.tokenizer", "standard") .putArray("index.analysis.analyzer.biword.filter", "shingler", 
"lowercase") .put("index.analysis.filter.shingler.type", "shingle") .put("index.analysis.filter.shingler.min_shingle_size", 2) .put("index.analysis.filter.shingler.max_shingle_size", 3)); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("name") .field("type", "multi_field") .startObject("fields") .startObject("name") .field("type", "string") .endObject() .startObject("shingled") .field("type", "string") .field("analyzer", "biword") .field("search_analyzer", "standard") .endObject() .endObject() .endObject() .endObject() .endObject().endObject(); assertAcked(builder.addMapping("type1", mapping)); ensureGreen(); index("test", "type1", "1", "name", "I like iced tea"); index("test", "type1", "2", "name", "I like tea."); index("test", "type1", "3", "name", "I like ice cream."); refresh(); DirectCandidateGenerator generator = candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2); PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("did_you_mean").field("name.shingled") .addCandidateGenerator(generator) .gramSize(3); Suggest searchSuggest = searchSuggest( "ice tea", phraseSuggestion); assertSuggestion(searchSuggest, 0, "did_you_mean", "iced tea"); generator.suggestMode(null); searchSuggest = searchSuggest( "ice tea", phraseSuggestion); assertSuggestionSize(searchSuggest, 0, 0, "did_you_mean"); } @Test // see #2729 public void testSizeOneShard() throws Exception { prepareCreate("test").setSettings( SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0).get(); ensureGreen(); for (int i = 0; i < 15; i++) { index("test", "type1", Integer.toString(i), "text", "abc" + i); } refresh(); SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellchecker")).get(); assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue()); TermSuggestionBuilder termSuggestion = termSuggestion("test") 
.suggestMode("always") // Always, otherwise the results can vary between requests. .text("abcd") .field("text") .size(10); Suggest suggest = searchSuggest( termSuggestion); assertSuggestion(suggest, 0, "test", 10, "abc0"); termSuggestion.text("abcd").shardSize(5); suggest = searchSuggest( termSuggestion); assertSuggestion(suggest, 0, "test", 5, "abc0"); } @Test public void testUnmappedField() throws IOException, InterruptedException, ExecutionException { CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer.biword.tokenizer", "standard") .putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase") .put("index.analysis.filter.shingler.type", "shingle") .put("index.analysis.filter.shingler.min_shingle_size", 2) .put("index.analysis.filter.shingler.max_shingle_size", 3)); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("name") .field("type", "multi_field") .startObject("fields") .startObject("name") .field("type", "string") .endObject() .startObject("shingled") .field("type", "string") .field("analyzer", "biword") .field("search_analyzer", "standard") .endObject() .endObject() .endObject() .endObject() .endObject().endObject(); assertAcked(builder.addMapping("type1", mapping)); ensureGreen(); indexRandom(true, client().prepareIndex("test", "type1").setSource("name", "I like iced tea"), client().prepareIndex("test", "type1").setSource("name", "I like tea."), client().prepareIndex("test", "type1").setSource("name", "I like ice cream.")); refresh(); PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("did_you_mean").field("name.shingled") .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2)) .gramSize(3); Suggest searchSuggest = searchSuggest( "ice tea", phraseSuggestion); 
assertSuggestion(searchSuggest, 0, 0, "did_you_mean", "iced tea"); phraseSuggestion.field("nosuchField"); { SearchRequestBuilder suggestBuilder = client().prepareSearch().setSize(0); suggestBuilder.setSuggestText("tetsting sugestion"); suggestBuilder.addSuggestion(phraseSuggestion); assertThrows(suggestBuilder, SearchPhaseExecutionException.class); } { SearchRequestBuilder suggestBuilder = client().prepareSearch().setSize(0); suggestBuilder.setSuggestText("tetsting sugestion"); suggestBuilder.addSuggestion(phraseSuggestion); assertThrows(suggestBuilder, SearchPhaseExecutionException.class); } } @Test public void testSimple() throws Exception { createIndex("test"); ensureGreen(); index("test", "type1", "1", "text", "abcd"); index("test", "type1", "2", "text", "aacd"); index("test", "type1", "3", "text", "abbd"); index("test", "type1", "4", "text", "abcc"); refresh(); SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellcecker")).get(); assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue()); TermSuggestionBuilder termSuggest = termSuggestion("test") .suggestMode("always") // Always, otherwise the results can vary between requests. .text("abcd") .field("text"); Suggest suggest = searchSuggest( termSuggest); assertSuggestion(suggest, 0, "test", "aacd", "abbd", "abcc"); assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd")); suggest = searchSuggest( termSuggest); assertSuggestion(suggest, 0, "test", "aacd","abbd", "abcc"); assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd")); } @Test public void testEmpty() throws Exception { createIndex("test"); ensureGreen(); index("test", "type1", "1", "foo", "bar"); refresh(); TermSuggestionBuilder termSuggest = termSuggestion("test") .suggestMode("always") // Always, otherwise the results can vary between requests. 
.text("abcd") .field("text"); Suggest suggest = searchSuggest( termSuggest); assertSuggestionSize(suggest, 0, 0, "test"); assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd")); suggest = searchSuggest( termSuggest); assertSuggestionSize(suggest, 0, 0, "test"); assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd")); } @Test public void testWithMultipleCommands() throws Exception { createIndex("test"); ensureGreen(); index("test", "typ1", "1", "field1", "prefix_abcd", "field2", "prefix_efgh"); index("test", "typ1", "2", "field1", "prefix_aacd", "field2", "prefix_eeeh"); index("test", "typ1", "3", "field1", "prefix_abbd", "field2", "prefix_efff"); index("test", "typ1", "4", "field1", "prefix_abcc", "field2", "prefix_eggg"); refresh(); Suggest suggest = searchSuggest( termSuggestion("size1") .size(1).text("prefix_abcd").maxTermFreq(10).prefixLength(1).minDocFreq(0) .field("field1").suggestMode("always"), termSuggestion("field2") .field("field2").text("prefix_eeeh prefix_efgh") .maxTermFreq(10).minDocFreq(0).suggestMode("always"), termSuggestion("accuracy") .field("field2").text("prefix_efgh").setAccuracy(1f) .maxTermFreq(10).minDocFreq(0).suggestMode("always")); assertSuggestion(suggest, 0, "size1", "prefix_aacd"); assertThat(suggest.getSuggestion("field2").getEntries().get(0).getText().string(), equalTo("prefix_eeeh")); assertSuggestion(suggest, 0, "field2", "prefix_efgh"); assertThat(suggest.getSuggestion("field2").getEntries().get(1).getText().string(), equalTo("prefix_efgh")); assertSuggestion(suggest, 1, "field2", "prefix_eeeh", "prefix_efff", "prefix_eggg"); assertSuggestionSize(suggest, 0, 0, "accuracy"); } @Test public void testSizeAndSort() throws Exception { createIndex("test"); ensureGreen(); Map<String, Integer> termsAndDocCount = new HashMap<>(); termsAndDocCount.put("prefix_aaad", 20); termsAndDocCount.put("prefix_abbb", 18); termsAndDocCount.put("prefix_aaca", 16); 
termsAndDocCount.put("prefix_abba", 14); termsAndDocCount.put("prefix_accc", 12); termsAndDocCount.put("prefix_addd", 10); termsAndDocCount.put("prefix_abaa", 8); termsAndDocCount.put("prefix_dbca", 6); termsAndDocCount.put("prefix_cbad", 4); termsAndDocCount.put("prefix_aacd", 1); termsAndDocCount.put("prefix_abcc", 1); termsAndDocCount.put("prefix_accd", 1); for (Map.Entry<String, Integer> entry : termsAndDocCount.entrySet()) { for (int i = 0; i < entry.getValue(); i++) { index("test", "type1", entry.getKey() + i, "field1", entry.getKey()); } } refresh(); Suggest suggest = searchSuggest( "prefix_abcd", termSuggestion("size3SortScoreFirst") .size(3).minDocFreq(0).field("field1").suggestMode("always"), termSuggestion("size10SortScoreFirst") .size(10).minDocFreq(0).field("field1").suggestMode("always").shardSize(50), termSuggestion("size3SortScoreFirstMaxEdits1") .maxEdits(1) .size(10).minDocFreq(0).field("field1").suggestMode("always"), termSuggestion("size10SortFrequencyFirst") .size(10).sort("frequency").shardSize(1000) .minDocFreq(0).field("field1").suggestMode("always")); // The commented out assertions fail sometimes because suggestions are based off of shard frequencies instead of index frequencies. 
assertSuggestion(suggest, 0, "size3SortScoreFirst", "prefix_aacd", "prefix_abcc", "prefix_accd"); assertSuggestion(suggest, 0, "size10SortScoreFirst", 10, "prefix_aacd", "prefix_abcc", "prefix_accd" /*, "prefix_aaad" */); assertSuggestion(suggest, 0, "size3SortScoreFirstMaxEdits1", "prefix_aacd", "prefix_abcc", "prefix_accd"); assertSuggestion(suggest, 0, "size10SortFrequencyFirst", "prefix_aaad", "prefix_abbb", "prefix_aaca", "prefix_abba", "prefix_accc", "prefix_addd", "prefix_abaa", "prefix_dbca", "prefix_cbad", "prefix_aacd"); // assertThat(suggest.get(3).getSuggestedWords().get("prefix_abcd").get(4).getTerm(), equalTo("prefix_abcc")); // assertThat(suggest.get(3).getSuggestedWords().get("prefix_abcd").get(4).getTerm(), equalTo("prefix_accd")); } @Test // see #2817 public void testStopwordsOnlyPhraseSuggest() throws IOException { assertAcked(prepareCreate("test").addMapping("typ1", "body", "type=string,analyzer=stopwd").setSettings( settingsBuilder() .put("index.analysis.analyzer.stopwd.tokenizer", "whitespace") .putArray("index.analysis.analyzer.stopwd.filter", "stop") )); ensureGreen(); index("test", "typ1", "1", "body", "this is a test"); refresh(); Suggest searchSuggest = searchSuggest( "a an the", phraseSuggestion("simple_phrase").field("body").gramSize(1) .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").minWordLength(1).suggestMode("always")) .size(1)); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); } @Test public void testPrefixLength() throws IOException { // Stopped here CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder() .put(SETTING_NUMBER_OF_SHARDS, 1) .put("index.analysis.analyzer.reverse.tokenizer", "standard") .putArray("index.analysis.analyzer.reverse.filter", "lowercase", "reverse") .put("index.analysis.analyzer.body.tokenizer", "standard") .putArray("index.analysis.analyzer.body.filter", "lowercase") .put("index.analysis.analyzer.bigram.tokenizer", "standard") 
.putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase") .put("index.analysis.filter.my_shingle.type", "shingle") .put("index.analysis.filter.my_shingle.output_unigrams", false) .put("index.analysis.filter.my_shingle.min_shingle_size", 2) .put("index.analysis.filter.my_shingle.max_shingle_size", 2)); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("_all").field("store", "yes").field("termVector", "with_positions_offsets").endObject() .startObject("properties") .startObject("body").field("type", "string").field("analyzer", "body").endObject() .startObject("body_reverse").field("type", "string").field("analyzer", "reverse").endObject() .startObject("bigram").field("type", "string").field("analyzer", "bigram").endObject() .endObject() .endObject().endObject(); assertAcked(builder.addMapping("type1", mapping)); ensureGreen(); index("test", "type1", "1", "body", "hello world"); index("test", "type1", "2", "body", "hello world"); index("test", "type1", "3", "body", "hello words"); refresh(); Suggest searchSuggest = searchSuggest( "hello word", phraseSuggestion("simple_phrase").field("body") .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").prefixLength(4).minWordLength(1).suggestMode("always")) .size(1).confidence(1.0f)); assertSuggestion(searchSuggest, 0, "simple_phrase", "hello words"); searchSuggest = searchSuggest( "hello word", phraseSuggestion("simple_phrase").field("body") .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").prefixLength(2).minWordLength(1).suggestMode("always")) .size(1).confidence(1.0f)); assertSuggestion(searchSuggest, 0, "simple_phrase", "hello world"); } @Test @Slow @Nightly public void testMarvelHerosPhraseSuggest() throws IOException { CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer.reverse.tokenizer", "standard") 
.putArray("index.analysis.analyzer.reverse.filter", "lowercase", "reverse") .put("index.analysis.analyzer.body.tokenizer", "standard") .putArray("index.analysis.analyzer.body.filter", "lowercase") .put("index.analysis.analyzer.bigram.tokenizer", "standard") .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase") .put("index.analysis.filter.my_shingle.type", "shingle") .put("index.analysis.filter.my_shingle.output_unigrams", false) .put("index.analysis.filter.my_shingle.min_shingle_size", 2) .put("index.analysis.filter.my_shingle.max_shingle_size", 2)); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("_all") .field("store", "yes") .field("termVector", "with_positions_offsets") .endObject() .startObject("properties") .startObject("body"). field("type", "string"). field("analyzer", "body") .endObject() .startObject("body_reverse"). field("type", "string"). field("analyzer", "reverse") .endObject() .startObject("bigram"). field("type", "string"). 
field("analyzer", "bigram") .endObject() .endObject() .endObject().endObject(); assertAcked(builder.addMapping("type1", mapping)); ensureGreen(); for (String line: Resources.readLines(SuggestSearchTests.class.getResource("/config/names.txt"), Charsets.UTF_8)) { index("test", "type1", line, "body", line, "body_reverse", line, "bigram", line); } refresh(); PhraseSuggestionBuilder phraseSuggest = phraseSuggestion("simple_phrase") .field("bigram").gramSize(2).analyzer("body") .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")) .size(1); Suggest searchSuggest = searchSuggest( "american ame", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "american ace"); assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("american ame")); phraseSuggest.realWordErrorLikelihood(0.95f); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); // Check the "text" field this one time. 
assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("Xor the Got-Jewel")); // Ask for highlighting phraseSuggest.highlight("<em>", "</em>"); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getOptions().get(0).getHighlighted().string(), equalTo("<em>xorr</em> the <em>god</em> jewel")); // pass in a correct phrase phraseSuggest.highlight(null, null).confidence(0f).size(1).maxErrors(0.5f); searchSuggest = searchSuggest( "Xorr the God-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); // pass in a correct phrase - set confidence to 2 phraseSuggest.confidence(2f); searchSuggest = searchSuggest( "Xorr the God-Jewel", phraseSuggest); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); // pass in a correct phrase - set confidence to 0.99 phraseSuggest.confidence(0.99f); searchSuggest = searchSuggest( "Xorr the God-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); //test reverse suggestions with pre & post filter phraseSuggest .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")) .addCandidateGenerator(candidateGenerator("body_reverse").minWordLength(1).suggestMode("always").preFilter("reverse").postFilter("reverse")); searchSuggest = searchSuggest( "xor the yod-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); // set all mass to trigrams (not indexed) phraseSuggest.clearCandidateGenerators() .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")) .smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(1,0,0)); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestionSize(searchSuggest, 0, 0, 
"simple_phrase"); // set all mass to bigrams phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(0,1,0)); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); // distribute mass phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(0.4,0.4,0.2)); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); searchSuggest = searchSuggest( "american ame", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "american ace"); // try all smoothing methods phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(0.4,0.4,0.2)); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.Laplace(0.2)); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.StupidBackoff(0.1)); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel"); // check tokenLimit phraseSuggest.smoothingModel(null).tokenLimit(4); searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); phraseSuggest.tokenLimit(15).smoothingModel(new PhraseSuggestionBuilder.StupidBackoff(0.1)); searchSuggest = searchSuggest( "Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel", phraseSuggest); assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel xorr the god jewel xorr the god jewel"); // Check the name this time because we're repeating it which is funky 
// Tail of the previous phrase-suggest test: the suggest "text" echoes the original input.
assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(),
        equalTo("Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel"));
}

/**
 * Verifies that the candidate-generator {@code size} limits which corrections can be found:
 * with size 1 only the top candidate per term is considered (and "god" is missed for "Gut"),
 * while size 2 lets the edit-distance-2 candidate through.
 * NOTE(review): method name has a typo ("Pararm" -> "Param"); kept as-is to avoid breaking
 * anything that selects tests by name.
 */
@Test
public void testSizePararm() throws IOException {
    // Single shard keeps term statistics deterministic for the suggester.
    CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder()
            .put(SETTING_NUMBER_OF_SHARDS, 1)
            .put("index.analysis.analyzer.reverse.tokenizer", "standard")
            .putArray("index.analysis.analyzer.reverse.filter", "lowercase", "reverse")
            .put("index.analysis.analyzer.body.tokenizer", "standard")
            .putArray("index.analysis.analyzer.body.filter", "lowercase")
            .put("index.analysis.analyzer.bigram.tokenizer", "standard")
            .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
            .put("index.analysis.filter.my_shingle.type", "shingle")
            .put("index.analysis.filter.my_shingle.output_unigrams", false)
            .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
            .put("index.analysis.filter.my_shingle.max_shingle_size", 2));
    XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type1")
            .startObject("_all")
            .field("store", "yes")
            .field("termVector", "with_positions_offsets")
            .endObject()
            .startObject("properties")
            .startObject("body")
            .field("type", "string")
            .field("analyzer", "body")
            .endObject()
            .startObject("body_reverse")
            .field("type", "string")
            .field("analyzer", "reverse")
            .endObject()
            .startObject("bigram")
            .field("type", "string")
            .field("analyzer", "bigram")
            .endObject()
            .endObject()
            .endObject()
            .endObject();
    assertAcked(builder.addMapping("type1", mapping));
    ensureGreen();

    String line = "xorr the god jewel";
    index("test", "type1", "1", "body", line, "body_reverse", line, "bigram", line);
    line = "I got it this time";
    index("test", "type1", "2", "body", line, "body_reverse", line, "bigram", line);
    refresh();

    // size(1): only one candidate per term is generated on the shard, not enough to reach "god".
    PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("simple_phrase")
            .realWordErrorLikelihood(0.95f)
            .field("bigram")
            .gramSize(2)
            .analyzer("body")
            .addCandidateGenerator(candidateGenerator("body").minWordLength(1).prefixLength(1).suggestMode("always").size(1).accuracy(0.1f))
            .smoothingModel(new PhraseSuggestionBuilder.StupidBackoff(0.1))
            .maxErrors(1.0f)
            .size(5);
    Suggest searchSuggest = searchSuggest( "Xorr the Gut-Jewel", phraseSuggestion);
    assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");

    // we allow a size of 2 now on the shard generator level so "god" will be found since it's LD2
    phraseSuggestion.clearCandidateGenerators()
            .addCandidateGenerator(candidateGenerator("body").minWordLength(1).prefixLength(1).suggestMode("always").size(2).accuracy(0.1f));
    searchSuggest = searchSuggest( "Xorr the Gut-Jewel", phraseSuggestion);
    assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
}

/**
 * Exercises error and edge cases for phrase-suggester configuration: nonexistent fields,
 * analyzers that only emit n-grams, forced unigrams, and tie-breaking between candidates.
 */
@Test
@Nightly
public void testPhraseBoundaryCases() throws IOException {
    CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder()
            .put(indexSettings()).put(SETTING_NUMBER_OF_SHARDS, 1) // to get reliable statistics we should put this all into one shard
            .put("index.analysis.analyzer.body.tokenizer", "standard")
            .putArray("index.analysis.analyzer.body.filter", "lowercase")
            .put("index.analysis.analyzer.bigram.tokenizer", "standard")
            .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
            .put("index.analysis.analyzer.ngram.tokenizer", "standard")
            .putArray("index.analysis.analyzer.ngram.filter", "my_shingle2", "lowercase")
            .put("index.analysis.analyzer.myDefAnalyzer.tokenizer", "standard")
            .putArray("index.analysis.analyzer.myDefAnalyzer.filter", "shingle", "lowercase")
            .put("index.analysis.filter.my_shingle.type", "shingle")
            .put("index.analysis.filter.my_shingle.output_unigrams", false)
            .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
            .put("index.analysis.filter.my_shingle.max_shingle_size", 2)
            .put("index.analysis.filter.my_shingle2.type", "shingle")
            .put("index.analysis.filter.my_shingle2.output_unigrams", true)
            .put("index.analysis.filter.my_shingle2.min_shingle_size", 2)
            .put("index.analysis.filter.my_shingle2.max_shingle_size", 2));
    XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject().startObject("type1")
            .startObject("_all").field("store", "yes").field("termVector", "with_positions_offsets").endObject()
            .startObject("properties")
            .startObject("body").field("type", "string").field("analyzer", "body").endObject()
            .startObject("bigram").field("type", "string").field("analyzer", "bigram").endObject()
            .startObject("ngram").field("type", "string").field("analyzer", "ngram").endObject()
            .endObject()
            .endObject().endObject();
    assertAcked(builder.addMapping("type1", mapping));
    ensureGreen();

    // assumes /config/names.txt is on the test classpath -- TODO confirm fixture location
    for (String line: Resources.readLines(SuggestSearchTests.class.getResource("/config/names.txt"), Charsets.UTF_8)) {
        index("test", "type1", line, "body", line, "bigram", line, "ngram", line);
    }
    refresh();
    NumShards numShards = getNumShards("test");

    // Lets make sure some things throw exceptions
    PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("simple_phrase")
            .field("bigram")
            .analyzer("body")
            .addCandidateGenerator(candidateGenerator("does_not_exist").minWordLength(1).suggestMode("always"))
            .realWordErrorLikelihood(0.95f)
            .maxErrors(0.5f)
            .size(1);
    try {
        searchSuggest( "Xor the Got-Jewel", numShards.numPrimaries, phraseSuggestion);
        fail("field does not exists");
    } catch (SearchPhaseExecutionException e) {}

    phraseSuggestion.clearCandidateGenerators().analyzer(null);
    try {
        searchSuggest( "Xor the Got-Jewel", numShards.numPrimaries, phraseSuggestion);
        fail("analyzer does only produce ngrams");
    } catch (SearchPhaseExecutionException e) {
    }

    phraseSuggestion.analyzer("bigram");
    try {
        searchSuggest( "Xor the Got-Jewel", numShards.numPrimaries, phraseSuggestion);
        fail("analyzer does only produce ngrams");
    } catch (SearchPhaseExecutionException e) {
    }

    // Now we'll make sure some things don't
    phraseSuggestion.forceUnigrams(false);
    searchSuggest( "Xor the Got-Jewel", phraseSuggestion);

    // Field doesn't produce unigrams but the analyzer does
    phraseSuggestion.forceUnigrams(true).field("bigram").analyzer("ngram");
    searchSuggest( "Xor the Got-Jewel", phraseSuggestion);

    phraseSuggestion.field("ngram").analyzer("myDefAnalyzer")
            .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"));
    Suggest suggest = searchSuggest( "Xor the Got-Jewel", phraseSuggestion);

    // "xorr the god jewel" and "xorn the god jewel" have identical scores (we are only using unigrams to score), so we tie break by
    // earlier term (xorn):
    assertSuggestion(suggest, 0, "simple_phrase", "xorn the god jewel");

    phraseSuggestion.analyzer(null);
    suggest = searchSuggest( "Xor the Got-Jewel", phraseSuggestion);

    // In this case xorr has a better score than xorn because we set the field back to the default (my_shingle2) analyzer, so the
    // probability that the term is not in the dictionary but is NOT a misspelling is relatively high in this case compared to the
    // others that have no n-gram with the other terms in the phrase :) you can set this realWorldErrorLikelyhood
    assertSuggestion(suggest, 0, "simple_phrase", "xorr the god jewel");
}

/**
 * Term suggester across three routed shards: each shard holds one document, and the
 * suggester must still collect all three options.
 */
@Test
public void testDifferentShardSize() throws Exception {
    createIndex("test");
    ensureGreen();
    indexRandom(true,
            client().prepareIndex("test", "type1", "1").setSource("field1", "foobar1").setRouting("1"),
            client().prepareIndex("test", "type1", "2").setSource("field1", "foobar2").setRouting("2"),
            client().prepareIndex("test", "type1", "3").setSource("field1", "foobar3").setRouting("3"));

    Suggest suggest = searchSuggest( "foobar", termSuggestion("simple")
            .size(10).minDocFreq(0).field("field1").suggestMode("always"));
    ElasticsearchAssertions.assertSuggestionSize(suggest, 0, 3, "simple");
}

/**
 * Suggesting on a field that exists on no shard must fail the request, while suggesting on a
 * mapped field that some shards hold no documents for must not.
 */
@Test // see #3469
public void testShardFailures() throws IOException, InterruptedException {
    CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder()
            .put(indexSettings())
            .put("index.analysis.analyzer.suggest.tokenizer", "standard")
            .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
            .put("index.analysis.filter.shingler.type", "shingle")
            .put("index.analysis.filter.shingler.min_shingle_size", 2)
            .put("index.analysis.filter.shingler.max_shingle_size", 5)
            .put("index.analysis.filter.shingler.output_unigrams", true));
    XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type2")
            .startObject("properties")
            .startObject("name")
            .field("type", "multi_field")
            .startObject("fields")
            .startObject("name")
            .field("type", "string")
            .field("analyzer", "suggest")
            .endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject().endObject();
    assertAcked(builder.addMapping("type2", mapping));
    ensureGreen();

    // Spread plain documents over the shards so every shard is non-empty.
    index("test", "type2", "1", "foo", "bar");
    index("test", "type2", "2", "foo", "bar");
    index("test", "type2", "3", "foo", "bar");
    index("test", "type2", "4", "foo", "bar");
    index("test", "type2", "5", "foo", "bar");
    index("test", "type2", "1", "name", "Just testing the suggestions api");
    index("test", "type2", "2", "name", "An other title about equal length");
    // Note that the last document has to have about the same length as the other or cutoff rechecking will remove the useful suggestion.
    refresh();

    // When searching on a shard with a non existing mapping, we should fail
    SearchRequestBuilder request = client().prepareSearch().setSize(0)
            .setSuggestText("tetsting sugestion")
            .addSuggestion(phraseSuggestion("did_you_mean").field("fielddoesnotexist").maxErrors(5.0f));
    assertThrows(request, SearchPhaseExecutionException.class);

    // When searching on a shard which does not hold yet any document of an existing type, we should not fail
    SearchResponse searchResponse = client().prepareSearch().setSize(0)
            .setSuggestText("tetsting sugestion")
            .addSuggestion(phraseSuggestion("did_you_mean").field("name").maxErrors(5.0f))
            .get();
    ElasticsearchAssertions.assertNoFailures(searchResponse);
    ElasticsearchAssertions.assertSuggestion(searchResponse.getSuggest(), 0, 0, "did_you_mean", "testing suggestions");
}

/**
 * Like {@link #testShardFailures()} but with shards that hold no document at all for the
 * suggested-on type: the phrase suggester must still succeed.
 */
@Test // see #3469
public void testEmptyShards() throws IOException, InterruptedException {
    XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().
            startObject().
            startObject("type1").
            startObject("properties").
            startObject("name").
            field("type", "multi_field").
            startObject("fields").
            startObject("name").
            field("type", "string").
            field("analyzer", "suggest").
            endObject().
            endObject().
            endObject().
            endObject().
            endObject().
endObject(); assertAcked(prepareCreate("test").setSettings(settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer.suggest.tokenizer", "standard") .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler") .put("index.analysis.filter.shingler.type", "shingle") .put("index.analysis.filter.shingler.min_shingle_size", 2) .put("index.analysis.filter.shingler.max_shingle_size", 5) .put("index.analysis.filter.shingler.output_unigrams", true)).addMapping("type1", mappingBuilder)); ensureGreen(); index("test", "type2", "1", "foo", "bar"); index("test", "type2", "2", "foo", "bar"); index("test", "type1", "1", "name", "Just testing the suggestions api"); index("test", "type1", "2", "name", "An other title about equal length"); refresh(); SearchResponse searchResponse = client().prepareSearch() .setSize(0) .setSuggestText("tetsting sugestion") .addSuggestion(phraseSuggestion("did_you_mean").field("name").maxErrors(5.0f)) .get(); assertNoFailures(searchResponse); assertSuggestion(searchResponse.getSuggest(), 0, 0, "did_you_mean", "testing suggestions"); } /** * Searching for a rare phrase shouldn't provide any suggestions if confidence > 1. This was possible before we rechecked the cutoff * score during the reduce phase. Failures don't occur every time - maybe two out of five tries but we don't repeat it to save time. */ @Test public void testSearchForRarePhrase() throws IOException { // If there isn't enough chaf per shard then shards can become unbalanced, making the cutoff recheck this is testing do more harm then good. 
    int chafPerShard = 100;

    CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder()
            .put(indexSettings())
            .put("index.analysis.analyzer.body.tokenizer", "standard")
            .putArray("index.analysis.analyzer.body.filter", "lowercase", "my_shingle")
            .put("index.analysis.filter.my_shingle.type", "shingle")
            .put("index.analysis.filter.my_shingle.output_unigrams", true)
            .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
            .put("index.analysis.filter.my_shingle.max_shingle_size", 2));
    XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type1")
            .startObject("_all")
            .field("store", "yes")
            .field("termVector", "with_positions_offsets")
            .endObject()
            .startObject("properties")
            .startObject("body")
            .field("type", "string")
            .field("analyzer", "body")
            .endObject()
            .endObject()
            .endObject()
            .endObject();
    assertAcked(builder.addMapping("type1", mapping));
    ensureGreen();

    NumShards test = getNumShards("test");

    // A handful of near-miss phrases plus per-shard "chaff" filler to keep shards balanced.
    List<String> phrases = new ArrayList<>();
    Collections.addAll(phrases, "nobel prize", "noble gases", "somethingelse prize", "pride and joy", "notes are fun");
    for (int i = 0; i < 8; i++) {
        phrases.add("noble somethingelse" + i);
    }
    for (int i = 0; i < test.numPrimaries * chafPerShard; i++) {
        phrases.add("chaff" + i);
    }
    for (String phrase: phrases) {
        index("test", "type1", phrase, "body", phrase);
    }
    refresh();

    // The query matches an indexed phrase exactly, so with confidence 2 no suggestion may win.
    Suggest searchSuggest = searchSuggest("nobel prize", phraseSuggestion("simple_phrase")
            .field("body")
            .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f))
            .confidence(2f)
            .maxErrors(5f)
            .size(1));
    assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");

    // A misspelling of the rare phrase should still be corrected.
    searchSuggest = searchSuggest("noble prize", phraseSuggestion("simple_phrase")
            .field("body")
            .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f))
            .confidence(2f)
            .maxErrors(5f)
            .size(1));
    assertSuggestion(searchSuggest, 0, 0, "simple_phrase", "nobel prize");
}

/**
 * If the suggester finds tons of options then picking the right one is slow without a fix
 * (exercised by the timing check at the end of this test).
 */
@Test
@Nightly
public void suggestWithManyCandidates() throws InterruptedException, ExecutionException, IOException {
    CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder()
            .put(indexSettings())
            .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
            .put("index.analysis.analyzer.text.tokenizer", "standard")
            .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
            .put("index.analysis.filter.my_shingle.type", "shingle")
            .put("index.analysis.filter.my_shingle.output_unigrams", true)
            .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
            .put("index.analysis.filter.my_shingle.max_shingle_size", 3));
    XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type1")
            .startObject("properties")
            .startObject("title")
            .field("type", "string")
            .field("analyzer", "text")
            .endObject()
            .endObject()
            .endObject()
            .endObject();
    assertAcked(builder.addMapping("type1", mapping));
    ensureGreen();

    ImmutableList.Builder<String> titles = ImmutableList.<String>builder();
    // We're going to be searching for:
    //   united states house of representatives elections in washington 2006
    // But we need to make sure we generate a ton of suggestions so we add a bunch of candidates.
    // Many of these candidates are drawn from page names on English Wikipedia.

    // Tons of different options very near the exact query term
    titles.add("United States House of Representatives Elections in Washington 1789");
    for (int year = 1790; year < 2014; year+= 2) {
        titles.add("United States House of Representatives Elections in Washington " + year);
    }
    // Six of these are near enough to be viable suggestions, just not the top one

    // But we can't stop there! Titles that are just a year are pretty common so lets just add one per year
    // since 0. Why not?
    for (int year = 0; year < 2015; year++) {
        titles.add(Integer.toString(year));
    }
    // That ought to provide more, less-good candidates for the last term

    // Now remove or add plural copies of every term we can
    titles.add("State");
    titles.add("Houses of Parliament");
    titles.add("Representative Government");
    titles.add("Election");
    // Now some possessive
    titles.add("Washington's Birthday");
    // And some conjugation
    titles.add("Unified Modeling Language");
    titles.add("Unite Against Fascism");
    titles.add("Stated Income Tax");
    titles.add("Media organizations housed within colleges");
    // And other stuff
    titles.add("Untied shoelaces");
    titles.add("Unit circle");
    titles.add("Untitled");
    titles.add("Unicef");
    titles.add("Unrated");
    titles.add("UniRed");
    titles.add("Jalan Uniten–Dengkil"); // Highway in Malaysia
    titles.add("UNITAS");
    titles.add("UNITER");
    titles.add("Un-Led-Ed");
    titles.add("STATS LLC");
    titles.add("Staples");
    titles.add("Skates");
    titles.add("Statues of the Liberators");
    titles.add("Staten Island");
    titles.add("Statens Museum for Kunst");
    titles.add("Hause"); // The last name or the German word, whichever.
    titles.add("Hose");
    titles.add("Hoses");
    titles.add("Howse Peak");
    titles.add("The Hoose-Gow");
    titles.add("Hooser");
    titles.add("Electron");
    titles.add("Electors");
    titles.add("Evictions");
    titles.add("Coronal mass ejection");
    titles.add("Wasington"); // A film?
    titles.add("Warrington"); // A town in England
    titles.add("Waddington"); // Lots of places have this name
    titles.add("Watlington"); // Ditto
    titles.add("Waplington"); // Yup, also a town
    titles.add("Washing of the Spears"); // Book

    for (char c = 'A'; c <= 'Z'; c++) {
        // Can't forget lists, glorious lists!
        titles.add("List of former members of the United States House of Representatives (" + c + ")");

        // Lots of people are named Washington <Middle Initial>. LastName
        titles.add("Washington " + c + ". Lastname");

        // Lets just add some more to be evil
        titles.add("United " + c);
        titles.add("States " + c);
        titles.add("House " + c);
        titles.add("Elections " + c);
        titles.add("2006 " + c);
        titles.add(c + " United");
        titles.add(c + " States");
        titles.add(c + " House");
        titles.add(c + " Elections");
        titles.add(c + " 2006");
    }
    List<IndexRequestBuilder> builders = new ArrayList<>();
    for (String title: titles.build()) {
        builders.add(client().prepareIndex("test", "type1").setSource("title", title));
    }
    indexRandom(true, builders);

    PhraseSuggestionBuilder suggest = phraseSuggestion("title")
            .field("title")
            .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("title")
                    .suggestMode("always")
                    .maxTermFreq(.99f)
                    .size(1000) // Setting a silly high size helps to generate a larger list of candidates for testing.
                    .maxInspections(1000) // This too
            )
            .confidence(0f)
            .maxErrors(2f)
            .shardSize(30000)
            .size(30000);
    Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", suggest);
    assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006");
    assertSuggestionSize(searchSuggest, 0, 25480, "title"); // Just to prove that we've run through a ton of options

    suggest.size(1);
    long start = System.currentTimeMillis();
    searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", suggest);
    long total = System.currentTimeMillis() - start;
    assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006");
    // assertThat(total, lessThan(1000L)); // Takes many seconds without fix - just for debugging
}

/**
 * Covers phrase-suggester collation: filtering suggestions with a collate query or filter,
 * templated collate scripts with parameters, error handling for malformed collate bodies,
 * and the {@code collatePrune} flag.
 */
@Test
public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionException, IOException {
    CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder()
            .put(indexSettings())
            .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
            .put("index.analysis.analyzer.text.tokenizer", "standard")
            .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
            .put("index.analysis.filter.my_shingle.type", "shingle")
            .put("index.analysis.filter.my_shingle.output_unigrams", true)
            .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
            .put("index.analysis.filter.my_shingle.max_shingle_size", 3));
    XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type1")
            .startObject("properties")
            .startObject("title")
            .field("type", "string")
            .field("analyzer", "text")
            .endObject()
            .endObject()
            .endObject()
            .endObject();
    assertAcked(builder.addMapping("type1", mapping));
    ensureGreen();

    ImmutableList.Builder<String> titles = ImmutableList.<String>builder();
    titles.add("United States House of Representatives Elections in Washington 2006");
    titles.add("United States House of Representatives Elections in Washington 2005");
    titles.add("State");
    titles.add("Houses of Parliament");
    titles.add("Representative Government");
    titles.add("Election");

    List<IndexRequestBuilder> builders = new ArrayList<>();
    for (String title: titles.build()) {
        builders.add(client().prepareIndex("test", "type1").setSource("title", title));
    }
    indexRandom(true, builders);

    // suggest without filtering
    PhraseSuggestionBuilder suggest = phraseSuggestion("title")
            .field("title")
            .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("title")
                    .suggestMode("always")
                    .maxTermFreq(.99f)
                    .size(10)
                    .maxInspections(200)
            )
            .confidence(0f)
            .maxErrors(2f)
            .shardSize(30000)
            .size(10);
    Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", suggest);
    assertSuggestionSize(searchSuggest, 0, 10, "title");

    // suggest with filtering
    String filterString = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("match_phrase")
            .field("title", "{{suggestion}}")
            .endObject()
            .endObject()
            .string();
    PhraseSuggestionBuilder filteredQuerySuggest = suggest.collateQuery(filterString);
    searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", filteredQuerySuggest);
    assertSuggestionSize(searchSuggest, 0, 2, "title");

    // filtered suggest with no result (boundary case)
    searchSuggest = searchSuggest("Elections of Representatives Parliament", filteredQuerySuggest);
    assertSuggestionSize(searchSuggest, 0, 0, "title");

    NumShards numShards = getNumShards("test");

    // filtered suggest with bad query
    String incorrectFilterString = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("test")
            .field("title", "{{suggestion}}")
            .endObject()
            .endObject()
            .string();
    PhraseSuggestionBuilder incorrectFilteredSuggest = suggest.collateQuery(incorrectFilterString);
    try {
        searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, incorrectFilteredSuggest);
        fail("Post query error has been swallowed");
    } catch(ElasticsearchException e) {
        // expected
    }

    // suggest with filter collation
    String filterStringAsFilter = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("query")
            .startObject("match_phrase")
            .field("title", "{{suggestion}}")
            .endObject()
            .endObject()
            .endObject()
            .string();
    PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(null).collateFilter(filterStringAsFilter);
    searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", filteredFilterSuggest);
    assertSuggestionSize(searchSuggest, 0, 2, "title");

    // filtered suggest with bad filter
    String filterStr = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("pprefix")
            .field("title", "{{suggestion}}")
            .endObject()
            .endObject()
            .string();
    PhraseSuggestionBuilder in = suggest.collateQuery(null).collateFilter(filterStr);
    try {
        searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, in);
        fail("Post filter error has been swallowed");
    } catch(ElasticsearchException e) {
        //expected
    }

    // collate script failure due to no additional params
    String collateWithParams = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("{{query_type}}")
            .field("{{query_field}}", "{{suggestion}}")
            .endObject()
            .endObject()
            .string();
    PhraseSuggestionBuilder phraseSuggestWithNoParams = suggest.collateFilter(null).collateQuery(collateWithParams);
    try {
        searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, phraseSuggestWithNoParams);
        fail("Malformed query (lack of additional params) should fail");
    } catch (ElasticsearchException e) {
        // expected
    }

    // collate script with additional params
    Map<String, Object> params = new HashMap<>();
    params.put("query_type", "match_phrase");
    params.put("query_field", "title");

    PhraseSuggestionBuilder phraseSuggestWithParams = suggest.collateFilter(null).collateQuery(collateWithParams).collateParams(params);
    searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParams);
    assertSuggestionSize(searchSuggest, 0, 2, "title");

    //collate request defining both query/filter should fail
    PhraseSuggestionBuilder phraseSuggestWithFilterAndQuery = suggest.collateFilter(filterStringAsFilter).collateQuery(filterString);
    try {
        searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, phraseSuggestWithFilterAndQuery);
        fail("expected parse failure, as both filter and query are set in collate");
    } catch (ElasticsearchException e) {
        // expected
    }

    // collate query request with prune set to true
    PhraseSuggestionBuilder phraseSuggestWithParamsAndReturn = suggest.collateFilter(null).collateQuery(collateWithParams).collateParams(params).collatePrune(true);
    searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParamsAndReturn);
    assertSuggestionSize(searchSuggest, 0, 10, "title");
assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2); collateWithParams = XContentFactory.jsonBuilder() .startObject() .startObject("query") .startObject("{{query_type}}") .field("{{query_field}}", "{{suggestion}}") .endObject() .endObject() .endObject().string(); params.clear(); params.put("query_type", "match_phrase"); params.put("query_field", "title"); // collate filter request with prune set to true phraseSuggestWithParamsAndReturn = suggest.collateFilter(collateWithParams).collateQuery(null).collateParams(params).collatePrune(true); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParamsAndReturn); assertSuggestionSize(searchSuggest, 0, 10, "title"); assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2); } protected Suggest searchSuggest(SuggestionBuilder<?>... suggestion) { return searchSuggest(null, suggestion); } protected Suggest searchSuggest(String suggestText, SuggestionBuilder<?>... suggestions) { return searchSuggest(suggestText, 0, suggestions); } protected Suggest searchSuggest(String suggestText, int expectShardsFailed, SuggestionBuilder<?>... 
suggestions) { if (randomBoolean()) { SearchRequestBuilder builder = client().prepareSearch().setSize(0); if (suggestText != null) { builder.setSuggestText(suggestText); } for (SuggestionBuilder<?> suggestion : suggestions) { builder.addSuggestion(suggestion); } SearchResponse actionGet = builder.execute().actionGet(); assertThat(Arrays.toString(actionGet.getShardFailures()), actionGet.getFailedShards(), equalTo(expectShardsFailed)); return actionGet.getSuggest(); } else { SuggestRequestBuilder builder = client().prepareSuggest(); if (suggestText != null) { builder.setSuggestText(suggestText); } for (SuggestionBuilder<?> suggestion : suggestions) { builder.addSuggestion(suggestion); } SuggestResponse actionGet = builder.execute().actionGet(); assertThat(Arrays.toString(actionGet.getShardFailures()), actionGet.getFailedShards(), equalTo(expectShardsFailed)); if (expectShardsFailed > 0) { throw new SearchPhaseExecutionException("suggest", "Suggest execution failed", new ShardSearchFailure[0]); } return actionGet.getSuggest(); } } }
src/test/java/org/elasticsearch/search/suggest/SuggestSearchTests.java
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.suggest; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableList; import com.google.common.io.Resources; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.*; import org.elasticsearch.action.suggest.SuggestRequestBuilder; import org.elasticsearch.action.suggest.SuggestResponse; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.DirectCandidateGenerator; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.junit.Test; import java.io.IOException; import java.util.*; import java.util.concurrent.ExecutionException; import static 
org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion;
import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion;
import static org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.candidateGenerator;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;

/**
 * Integration tests for term and phrase suggestions. Many of these tests make many requests that vary only slightly from one another.
 * Where possible these tests should declare for the first request, make the request, modify the configuration for the next request,
 * make that request, modify again, request again, etc. This makes it very obvious what changes between requests.
 */
public class SuggestSearchTests extends ElasticsearchIntegrationTest {

    @Test // see #3196
    public void testSuggestAcrossMultipleIndices() throws IOException {
        createIndex("test");
        ensureGreen();

        // Four near-identical terms so the term suggester always has candidates within edit distance of "abcd".
        index("test", "type1", "1", "text", "abcd");
        index("test", "type1", "2", "text", "aacd");
        index("test", "type1", "3", "text", "abbd");
        index("test", "type1", "4", "text", "abcc");
        refresh();

        TermSuggestionBuilder termSuggest = termSuggestion("test")
                .suggestMode("always") // Always, otherwise the results can vary between requests.
                .text("abcd")
                .field("text");
        logger.info("--> run suggestions with one index");
        searchSuggest( termSuggest);

        createIndex("test_1");
        ensureGreen();

        index("test_1", "type1", "1", "text", "ab cd");
        index("test_1", "type1", "2", "text", "aa cd");
        index("test_1", "type1", "3", "text", "ab bd");
        index("test_1", "type1", "4", "text", "ab cc");
        refresh();
        termSuggest = termSuggestion("test")
                .suggestMode("always") // Always, otherwise the results can vary between requests.
                .text("ab cd")
                .minWordLength(1)
                .field("text");
        logger.info("--> run suggestions with two indices");
        searchSuggest( termSuggest);

        // Third index uses a keyword analyzer, so its suggest entries will not line up with the other indices'.
        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties")
                .startObject("text").field("type", "string").field("analyzer", "keyword").endObject()
                .endObject()
                .endObject().endObject();
        assertAcked(prepareCreate("test_2").addMapping("type1", mapping));
        ensureGreen();

        index("test_2", "type1", "1", "text", "ab cd");
        index("test_2", "type1", "2", "text", "aa cd");
        index("test_2", "type1", "3", "text", "ab bd");
        index("test_2", "type1", "4", "text", "ab cc");
        index("test_2", "type1", "1", "text", "abcd");
        index("test_2", "type1", "2", "text", "aacd");
        index("test_2", "type1", "3", "text", "abbd");
        index("test_2", "type1", "4", "text", "abcc");
        refresh();

        termSuggest = termSuggestion("test")
                .suggestMode("always") // Always, otherwise the results can vary between requests.
                .text("ab cd")
                .minWordLength(1)
                .field("text");
        logger.info("--> run suggestions with three indices");
        // Mixed analysis chains across indices must be rejected; the error can surface wrapped or unwrapped
        // depending on where the reduce phase fails.
        try {
            searchSuggest( termSuggest);
            fail(" can not suggest across multiple indices with different analysis chains");
        } catch (ReduceSearchPhaseException ex) {
            assertThat(ex.getCause(), instanceOf(IllegalStateException.class));
            assertThat(ex.getCause().getMessage(),
                    anyOf(endsWith("Suggest entries have different sizes actual [1] expected [2]"),
                            endsWith("Suggest entries have different sizes actual [2] expected [1]")));
        } catch (IllegalStateException ex) {
            assertThat(ex.getMessage(),
                    anyOf(endsWith("Suggest entries have different sizes actual [1] expected [2]"),
                            endsWith("Suggest entries have different sizes actual [2] expected [1]")));
        }

        termSuggest = termSuggestion("test")
                .suggestMode("always") // Always, otherwise the results can vary between requests.
                .text("ABCD")
                .minWordLength(1)
                .field("text");
        logger.info("--> run suggestions with four indices");
        try {
            searchSuggest( termSuggest);
            fail(" can not suggest across multiple indices with different analysis chains");
        } catch (ReduceSearchPhaseException ex) {
            assertThat(ex.getCause(), instanceOf(IllegalStateException.class));
            assertThat(ex.getCause().getMessage(),
                    anyOf(endsWith("Suggest entries have different text actual [ABCD] expected [abcd]"),
                            endsWith("Suggest entries have different text actual [abcd] expected [ABCD]")));
        } catch (IllegalStateException ex) {
            assertThat(ex.getMessage(),
                    anyOf(endsWith("Suggest entries have different text actual [ABCD] expected [abcd]"),
                            endsWith("Suggest entries have different text actual [abcd] expected [ABCD]")));
        }
    }

    @Test // see #3037
    public void testSuggestModes() throws IOException {
        CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder()
                .put(SETTING_NUMBER_OF_SHARDS, 1)
                .put(SETTING_NUMBER_OF_REPLICAS, 0)
                .put("index.analysis.analyzer.biword.tokenizer", "standard")
                .putArray("index.analysis.analyzer.biword.filter", "shingler",
                        "lowercase")
                .put("index.analysis.filter.shingler.type", "shingle")
                .put("index.analysis.filter.shingler.min_shingle_size", 2)
                .put("index.analysis.filter.shingler.max_shingle_size", 3));

        // "name" is indexed twice: raw and shingled (bigrams/trigrams) for the phrase suggester.
        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties")
                .startObject("name")
                .field("type", "multi_field")
                .startObject("fields")
                .startObject("name")
                .field("type", "string")
                .endObject()
                .startObject("shingled")
                .field("type", "string")
                .field("analyzer", "biword")
                .field("search_analyzer", "standard")
                .endObject()
                .endObject()
                .endObject()
                .endObject()
                .endObject().endObject();
        assertAcked(builder.addMapping("type1", mapping));
        ensureGreen();

        index("test", "type1", "1", "name", "I like iced tea");
        index("test", "type1", "2", "name", "I like tea.");
        index("test", "type1", "3", "name", "I like ice cream.");
        refresh();

        DirectCandidateGenerator generator = candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2);
        PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("did_you_mean").field("name.shingled")
                .addCandidateGenerator(generator)
                .gramSize(3);
        Suggest searchSuggest = searchSuggest( "ice tea", phraseSuggestion);
        assertSuggestion(searchSuggest, 0, "did_you_mean", "iced tea");

        // With the default suggest mode (null) the exact-match input produces no correction.
        generator.suggestMode(null);
        searchSuggest = searchSuggest( "ice tea", phraseSuggestion);
        assertSuggestionSize(searchSuggest, 0, 0, "did_you_mean");
    }

    @Test // see #2729
    public void testSizeOneShard() throws Exception {
        prepareCreate("test").setSettings(
                SETTING_NUMBER_OF_SHARDS, 1,
                SETTING_NUMBER_OF_REPLICAS, 0).get();
        ensureGreen();

        for (int i = 0; i < 15; i++) {
            index("test", "type1", Integer.toString(i), "text", "abc" + i);
        }
        refresh();

        SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellchecker")).get();
        assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue());

        TermSuggestionBuilder termSuggestion = termSuggestion("test")
                .suggestMode("always") // Always, otherwise the results can vary between requests.
                .text("abcd")
                .field("text")
                .size(10);
        Suggest suggest = searchSuggest( termSuggestion);
        assertSuggestion(suggest, 0, "test", 10, "abc0");

        // shardSize caps candidates per shard; with a single shard it also caps the final result count.
        termSuggestion.text("abcd").shardSize(5);
        suggest = searchSuggest( termSuggestion);
        assertSuggestion(suggest, 0, "test", 5, "abc0");
    }

    @Test
    public void testUnmappedField() throws IOException, InterruptedException, ExecutionException {
        CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder()
                .put(indexSettings())
                .put("index.analysis.analyzer.biword.tokenizer", "standard")
                .putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
                .put("index.analysis.filter.shingler.type", "shingle")
                .put("index.analysis.filter.shingler.min_shingle_size", 2)
                .put("index.analysis.filter.shingler.max_shingle_size", 3));
        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("properties")
                .startObject("name")
                .field("type", "multi_field")
                .startObject("fields")
                .startObject("name")
                .field("type", "string")
                .endObject()
                .startObject("shingled")
                .field("type", "string")
                .field("analyzer", "biword")
                .field("search_analyzer", "standard")
                .endObject()
                .endObject()
                .endObject()
                .endObject()
                .endObject().endObject();
        assertAcked(builder.addMapping("type1", mapping));
        ensureGreen();

        indexRandom(true,
                client().prepareIndex("test", "type1").setSource("name", "I like iced tea"),
                client().prepareIndex("test", "type1").setSource("name", "I like tea."),
                client().prepareIndex("test", "type1").setSource("name", "I like ice cream."));
        refresh();

        PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("did_you_mean").field("name.shingled")
                .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2))
                .gramSize(3);
        Suggest searchSuggest = searchSuggest( "ice tea", phraseSuggestion);
assertSuggestion(searchSuggest, 0, 0, "did_you_mean", "iced tea");

        // Suggesting against an unmapped field must fail the request, not silently return nothing.
        phraseSuggestion.field("nosuchField");
        {
            SearchRequestBuilder suggestBuilder = client().prepareSearch().setSize(0);
            suggestBuilder.setSuggestText("tetsting sugestion");
            suggestBuilder.addSuggestion(phraseSuggestion);
            assertThrows(suggestBuilder, SearchPhaseExecutionException.class);
        }
        {
            SearchRequestBuilder suggestBuilder = client().prepareSearch().setSize(0);
            suggestBuilder.setSuggestText("tetsting sugestion");
            suggestBuilder.addSuggestion(phraseSuggestion);
            assertThrows(suggestBuilder, SearchPhaseExecutionException.class);
        }
    }

    @Test
    public void testSimple() throws Exception {
        createIndex("test");
        ensureGreen();

        index("test", "type1", "1", "text", "abcd");
        index("test", "type1", "2", "text", "aacd");
        index("test", "type1", "3", "text", "abbd");
        index("test", "type1", "4", "text", "abcc");
        refresh();

        SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellcecker")).get();
        assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue());

        TermSuggestionBuilder termSuggest = termSuggestion("test")
                .suggestMode("always") // Always, otherwise the results can vary between requests.
                .text("abcd")
                .field("text");
        Suggest suggest = searchSuggest( termSuggest);
        assertSuggestion(suggest, 0, "test", "aacd", "abbd", "abcc");
        assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));

        // Same request a second time must yield the same suggestions.
        suggest = searchSuggest( termSuggest);
        assertSuggestion(suggest, 0, "test", "aacd","abbd", "abcc");
        assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
    }

    @Test
    public void testEmpty() throws Exception {
        createIndex("test");
        ensureGreen();

        // Only "foo" is mapped/indexed, so suggesting on "text" returns an empty entry (not an error).
        index("test", "type1", "1", "foo", "bar");
        refresh();

        TermSuggestionBuilder termSuggest = termSuggestion("test")
                .suggestMode("always") // Always, otherwise the results can vary between requests.
                .text("abcd")
                .field("text");
        Suggest suggest = searchSuggest( termSuggest);
        assertSuggestionSize(suggest, 0, 0, "test");
        assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));

        suggest = searchSuggest( termSuggest);
        assertSuggestionSize(suggest, 0, 0, "test");
        assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
    }

    @Test
    public void testWithMultipleCommands() throws Exception {
        createIndex("test");
        ensureGreen();

        index("test", "typ1", "1", "field1", "prefix_abcd", "field2", "prefix_efgh");
        index("test", "typ1", "2", "field1", "prefix_aacd", "field2", "prefix_eeeh");
        index("test", "typ1", "3", "field1", "prefix_abbd", "field2", "prefix_efff");
        index("test", "typ1", "4", "field1", "prefix_abcc", "field2", "prefix_eggg");
        refresh();

        // Three named suggestions in a single request; each is asserted independently below.
        Suggest suggest = searchSuggest(
                termSuggestion("size1")
                        .size(1).text("prefix_abcd").maxTermFreq(10).prefixLength(1).minDocFreq(0)
                        .field("field1").suggestMode("always"),
                termSuggestion("field2")
                        .field("field2").text("prefix_eeeh prefix_efgh")
                        .maxTermFreq(10).minDocFreq(0).suggestMode("always"),
                termSuggestion("accuracy")
                        .field("field2").text("prefix_efgh").setAccuracy(1f)
                        .maxTermFreq(10).minDocFreq(0).suggestMode("always"));
        assertSuggestion(suggest, 0, "size1", "prefix_aacd");
        assertThat(suggest.getSuggestion("field2").getEntries().get(0).getText().string(), equalTo("prefix_eeeh"));
        assertSuggestion(suggest, 0, "field2", "prefix_efgh");
        assertThat(suggest.getSuggestion("field2").getEntries().get(1).getText().string(), equalTo("prefix_efgh"));
        assertSuggestion(suggest, 1, "field2", "prefix_eeeh", "prefix_efff", "prefix_eggg");
        // accuracy of 1.0 leaves no close-enough candidates.
        assertSuggestionSize(suggest, 0, 0, "accuracy");
    }

    @Test
    public void testSizeAndSort() throws Exception {
        createIndex("test");
        ensureGreen();

        // Document frequency per term; frequency-sorted suggestions below depend on these counts.
        Map<String, Integer> termsAndDocCount = new HashMap<>();
        termsAndDocCount.put("prefix_aaad", 20);
        termsAndDocCount.put("prefix_abbb", 18);
        termsAndDocCount.put("prefix_aaca", 16);
        termsAndDocCount.put("prefix_abba", 14);
        termsAndDocCount.put("prefix_accc", 12);
        termsAndDocCount.put("prefix_addd", 10);
        termsAndDocCount.put("prefix_abaa", 8);
        termsAndDocCount.put("prefix_dbca", 6);
        termsAndDocCount.put("prefix_cbad", 4);
        termsAndDocCount.put("prefix_aacd", 1);
        termsAndDocCount.put("prefix_abcc", 1);
        termsAndDocCount.put("prefix_accd", 1);

        for (Map.Entry<String, Integer> entry : termsAndDocCount.entrySet()) {
            for (int i = 0; i < entry.getValue(); i++) {
                index("test", "type1", entry.getKey() + i, "field1", entry.getKey());
            }
        }
        refresh();

        Suggest suggest = searchSuggest( "prefix_abcd",
                termSuggestion("size3SortScoreFirst")
                        .size(3).minDocFreq(0).field("field1").suggestMode("always"),
                termSuggestion("size10SortScoreFirst")
                        .size(10).minDocFreq(0).field("field1").suggestMode("always").shardSize(50),
                termSuggestion("size3SortScoreFirstMaxEdits1")
                        .maxEdits(1)
                        .size(10).minDocFreq(0).field("field1").suggestMode("always"),
                termSuggestion("size10SortFrequencyFirst")
                        .size(10).sort("frequency").shardSize(1000)
                        .minDocFreq(0).field("field1").suggestMode("always"));

        // The commented out assertions fail sometimes because suggestions are based off of shard frequencies instead of index frequencies.
        assertSuggestion(suggest, 0, "size3SortScoreFirst", "prefix_aacd", "prefix_abcc", "prefix_accd");
        assertSuggestion(suggest, 0, "size10SortScoreFirst", 10, "prefix_aacd", "prefix_abcc", "prefix_accd" /*, "prefix_aaad" */);
        assertSuggestion(suggest, 0, "size3SortScoreFirstMaxEdits1", "prefix_aacd", "prefix_abcc", "prefix_accd");
        assertSuggestion(suggest, 0, "size10SortFrequencyFirst", "prefix_aaad", "prefix_abbb", "prefix_aaca", "prefix_abba",
                "prefix_accc", "prefix_addd", "prefix_abaa", "prefix_dbca", "prefix_cbad", "prefix_aacd");

        // assertThat(suggest.get(3).getSuggestedWords().get("prefix_abcd").get(4).getTerm(), equalTo("prefix_abcc"));
        // assertThat(suggest.get(3).getSuggestedWords().get("prefix_abcd").get(4).getTerm(), equalTo("prefix_accd"));
    }

    @Test // see #2817
    public void testStopwordsOnlyPhraseSuggest() throws IOException {
        // An all-stopword input must not blow up the phrase suggester; it simply yields nothing.
        assertAcked(prepareCreate("test").addMapping("typ1", "body", "type=string,analyzer=stopwd").setSettings(
                settingsBuilder()
                        .put("index.analysis.analyzer.stopwd.tokenizer", "whitespace")
                        .putArray("index.analysis.analyzer.stopwd.filter", "stop")
        ));
        ensureGreen();
        index("test", "typ1", "1", "body", "this is a test");
        refresh();

        Suggest searchSuggest = searchSuggest( "a an the",
                phraseSuggestion("simple_phrase").field("body").gramSize(1)
                        .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").minWordLength(1).suggestMode("always"))
                        .size(1));
        assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
    }

    @Test
    public void testPrefixLength() throws IOException { // Stopped here
        CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder()
                .put(SETTING_NUMBER_OF_SHARDS, 1)
                .put("index.analysis.analyzer.reverse.tokenizer", "standard")
                .putArray("index.analysis.analyzer.reverse.filter", "lowercase", "reverse")
                .put("index.analysis.analyzer.body.tokenizer", "standard")
                .putArray("index.analysis.analyzer.body.filter", "lowercase")
                .put("index.analysis.analyzer.bigram.tokenizer", "standard")
.putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
                .put("index.analysis.filter.my_shingle.type", "shingle")
                .put("index.analysis.filter.my_shingle.output_unigrams", false)
                .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
                .put("index.analysis.filter.my_shingle.max_shingle_size", 2));
        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("_all").field("store", "yes").field("termVector", "with_positions_offsets").endObject()
                .startObject("properties")
                .startObject("body").field("type", "string").field("analyzer", "body").endObject()
                .startObject("body_reverse").field("type", "string").field("analyzer", "reverse").endObject()
                .startObject("bigram").field("type", "string").field("analyzer", "bigram").endObject()
                .endObject()
                .endObject().endObject();
        assertAcked(builder.addMapping("type1", mapping));
        ensureGreen();

        index("test", "type1", "1", "body", "hello world");
        index("test", "type1", "2", "body", "hello world");
        index("test", "type1", "3", "body", "hello words");
        refresh();

        // prefixLength(4): candidates must share the first 4 chars, so "word" can only become "words".
        Suggest searchSuggest = searchSuggest( "hello word",
                phraseSuggestion("simple_phrase").field("body")
                        .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").prefixLength(4).minWordLength(1).suggestMode("always"))
                        .size(1).confidence(1.0f));
        assertSuggestion(searchSuggest, 0, "simple_phrase", "hello words");

        // prefixLength(2) loosens the constraint and the more frequent "world" wins.
        searchSuggest = searchSuggest( "hello word",
                phraseSuggestion("simple_phrase").field("body")
                        .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").prefixLength(2).minWordLength(1).suggestMode("always"))
                        .size(1).confidence(1.0f));
        assertSuggestion(searchSuggest, 0, "simple_phrase", "hello world");
    }

    @Test
    @Slow
    @Nightly
    public void testMarvelHerosPhraseSuggest() throws IOException {
        CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder()
                .put(indexSettings())
                .put("index.analysis.analyzer.reverse.tokenizer", "standard")
                .putArray("index.analysis.analyzer.reverse.filter", "lowercase", "reverse")
                .put("index.analysis.analyzer.body.tokenizer", "standard")
                .putArray("index.analysis.analyzer.body.filter", "lowercase")
                .put("index.analysis.analyzer.bigram.tokenizer", "standard")
                .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
                .put("index.analysis.filter.my_shingle.type", "shingle")
                .put("index.analysis.filter.my_shingle.output_unigrams", false)
                .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
                .put("index.analysis.filter.my_shingle.max_shingle_size", 2));
        XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
                .startObject("_all")
                .field("store", "yes")
                .field("termVector", "with_positions_offsets")
                .endObject()
                .startObject("properties")
                .startObject("body").
                        field("type", "string").
                        field("analyzer", "body")
                .endObject()
                .startObject("body_reverse").
                        field("type", "string").
                        field("analyzer", "reverse")
                .endObject()
                .startObject("bigram").
                        field("type", "string").
                        field("analyzer", "bigram")
                .endObject()
                .endObject()
                .endObject().endObject();
        assertAcked(builder.addMapping("type1", mapping));
        ensureGreen();

        // One document per hero name from the fixture file; same value in all three analyzed fields.
        for (String line: Resources.readLines(SuggestSearchTests.class.getResource("/config/names.txt"), Charsets.UTF_8)) {
            index("test", "type1", line, "body", line, "body_reverse", line, "bigram", line);
        }
        refresh();

        PhraseSuggestionBuilder phraseSuggest = phraseSuggestion("simple_phrase")
                .field("bigram").gramSize(2).analyzer("body")
                .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
                .size(1);
        Suggest searchSuggest = searchSuggest( "american ame", phraseSuggest);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "american ace");
        assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("american ame"));

        phraseSuggest.realWordErrorLikelihood(0.95f);
        searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
        // Check the "text" field this one time.
        assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("Xor the Got-Jewel"));

        // Ask for highlighting
        phraseSuggest.highlight("<em>", "</em>");
        searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
        assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getOptions().get(0).getHighlighted().string(),
                equalTo("<em>xorr</em> the <em>god</em> jewel"));

        // pass in a correct phrase
        phraseSuggest.highlight(null, null).confidence(0f).size(1).maxErrors(0.5f);
        searchSuggest = searchSuggest( "Xorr the God-Jewel", phraseSuggest);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");

        // pass in a correct phrase - set confidence to 2
        phraseSuggest.confidence(2f);
        searchSuggest = searchSuggest( "Xorr the God-Jewel", phraseSuggest);
        assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");

        // pass in a correct phrase - set confidence to 0.99
        phraseSuggest.confidence(0.99f);
        searchSuggest = searchSuggest( "Xorr the God-Jewel", phraseSuggest);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");

        //test reverse suggestions with pre & post filter
        phraseSuggest
                .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
                .addCandidateGenerator(candidateGenerator("body_reverse").minWordLength(1).suggestMode("always").preFilter("reverse").postFilter("reverse"));
        searchSuggest = searchSuggest( "xor the yod-Jewel", phraseSuggest);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");

        // set all mass to trigrams (not indexed)
        phraseSuggest.clearCandidateGenerators()
                .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
                .smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(1,0,0));
        searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
        assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");

        // set all mass to bigrams
        phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(0,1,0));
        searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");

        // distribute mass
        phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(0.4,0.4,0.2));
        searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");

        searchSuggest = searchSuggest( "american ame", phraseSuggest);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "american ace");

        // try all smoothing methods
        phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(0.4,0.4,0.2));
        searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");

        phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.Laplace(0.2));
        searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");

        phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.StupidBackoff(0.1));
        searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");

        // check tokenLimit
        phraseSuggest.smoothingModel(null).tokenLimit(4);
        searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
        assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");

        phraseSuggest.tokenLimit(15).smoothingModel(new PhraseSuggestionBuilder.StupidBackoff(0.1));
        searchSuggest = searchSuggest( "Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel", phraseSuggest);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel xorr the god jewel xorr the god jewel");
        // Check the name this time because we're repeating it which is funky
        assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(),
                equalTo("Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel"));
    }

    // NOTE(review): method name has a typo ("Pararm" -> "Param"); left as-is to avoid changing the
    // externally visible test name.
    @Test
    public void testSizePararm() throws IOException {
        CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder()
                .put(SETTING_NUMBER_OF_SHARDS, 1)
                .put("index.analysis.analyzer.reverse.tokenizer", "standard")
                .putArray("index.analysis.analyzer.reverse.filter", "lowercase", "reverse")
                .put("index.analysis.analyzer.body.tokenizer", "standard")
                .putArray("index.analysis.analyzer.body.filter", "lowercase")
                .put("index.analysis.analyzer.bigram.tokenizer", "standard")
                .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
                .put("index.analysis.filter.my_shingle.type", "shingle")
                .put("index.analysis.filter.my_shingle.output_unigrams", false)
                .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
                .put("index.analysis.filter.my_shingle.max_shingle_size", 2));

        XContentBuilder mapping = XContentFactory.jsonBuilder()
                .startObject()
                .startObject("type1")
                .startObject("_all")
                .field("store", "yes")
                .field("termVector", "with_positions_offsets")
                .endObject()
                .startObject("properties")
                .startObject("body")
                .field("type", "string")
                .field("analyzer", "body")
                .endObject()
                .startObject("body_reverse")
                .field("type", "string")
                .field("analyzer", "reverse")
                .endObject()
                .startObject("bigram")
                .field("type", "string")
                .field("analyzer", "bigram")
                .endObject()
                .endObject()
                .endObject()
                .endObject();
        assertAcked(builder.addMapping("type1", mapping));
        ensureGreen();

        String line = "xorr the god jewel";
        index("test", "type1", "1", "body", line, "body_reverse", line, "bigram", line);
        line = "I got it this time";
        index("test", "type1", "2", "body", line, "body_reverse", line, "bigram", line);
        refresh();

        PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("simple_phrase")
                .realWordErrorLikelihood(0.95f)
                .field("bigram")
                .gramSize(2)
                .analyzer("body")
.addCandidateGenerator(candidateGenerator("body").minWordLength(1).prefixLength(1).suggestMode("always").size(1).accuracy(0.1f))
                .smoothingModel(new PhraseSuggestionBuilder.StupidBackoff(0.1))
                .maxErrors(1.0f)
                .size(5);
        // Generator size(1) keeps only the single closest candidate per term, so no correction survives.
        Suggest searchSuggest = searchSuggest( "Xorr the Gut-Jewel", phraseSuggestion);
        assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");

        // we allow a size of 2 now on the shard generator level so "god" will be found since it's LD2
        phraseSuggestion.clearCandidateGenerators()
                .addCandidateGenerator(candidateGenerator("body").minWordLength(1).prefixLength(1).suggestMode("always").size(2).accuracy(0.1f));
        searchSuggest = searchSuggest( "Xorr the Gut-Jewel", phraseSuggestion);
        assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
    }

    @Test
    @Nightly
    public void testPhraseBoundaryCases() throws IOException {
        CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder()
                .put(indexSettings()).put(SETTING_NUMBER_OF_SHARDS, 1) // to get reliable statistics we should put this all into one shard
                .put("index.analysis.analyzer.body.tokenizer", "standard")
                .putArray("index.analysis.analyzer.body.filter", "lowercase")
                .put("index.analysis.analyzer.bigram.tokenizer", "standard")
                .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
                .put("index.analysis.analyzer.ngram.tokenizer", "standard")
                .putArray("index.analysis.analyzer.ngram.filter", "my_shingle2", "lowercase")
                .put("index.analysis.analyzer.myDefAnalyzer.tokenizer", "standard")
                .putArray("index.analysis.analyzer.myDefAnalyzer.filter", "shingle", "lowercase")
                .put("index.analysis.filter.my_shingle.type", "shingle")
                .put("index.analysis.filter.my_shingle.output_unigrams", false)
                .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
                .put("index.analysis.filter.my_shingle.max_shingle_size", 2)
                .put("index.analysis.filter.my_shingle2.type", "shingle")
                .put("index.analysis.filter.my_shingle2.output_unigrams", true)
                .put("index.analysis.filter.my_shingle2.min_shingle_size", 2)
                .put("index.analysis.filter.my_shingle2.max_shingle_size", 2));

        XContentBuilder mapping = XContentFactory.jsonBuilder()
                .startObject().startObject("type1")
                .startObject("_all").field("store", "yes").field("termVector", "with_positions_offsets").endObject()
                .startObject("properties")
                .startObject("body").field("type", "string").field("analyzer", "body").endObject()
                .startObject("bigram").field("type", "string").field("analyzer", "bigram").endObject()
                .startObject("ngram").field("type", "string").field("analyzer", "ngram").endObject()
                .endObject()
                .endObject().endObject();
        assertAcked(builder.addMapping("type1", mapping));
        ensureGreen();

        for (String line: Resources.readLines(SuggestSearchTests.class.getResource("/config/names.txt"), Charsets.UTF_8)) {
            index("test", "type1", line, "body", line, "bigram", line, "ngram", line);
        }
        refresh();

        NumShards numShards = getNumShards("test");

        // Let's make sure some things throw exceptions
        PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("simple_phrase")
                .field("bigram")
                .analyzer("body")
                .addCandidateGenerator(candidateGenerator("does_not_exist").minWordLength(1).suggestMode("always"))
                .realWordErrorLikelihood(0.95f)
                .maxErrors(0.5f)
                .size(1);
        try {
            searchSuggest( "Xor the Got-Jewel", numShards.numPrimaries, phraseSuggestion);
            fail("field does not exists");
        } catch (SearchPhaseExecutionException e) {}

        phraseSuggestion.clearCandidateGenerators().analyzer(null);
        try {
            searchSuggest( "Xor the Got-Jewel", numShards.numPrimaries, phraseSuggestion);
            fail("analyzer does only produce ngrams");
        } catch (SearchPhaseExecutionException e) {
        }

        phraseSuggestion.analyzer("bigram");
        try {
            searchSuggest( "Xor the Got-Jewel", numShards.numPrimaries, phraseSuggestion);
            fail("analyzer does only produce ngrams");
        } catch (SearchPhaseExecutionException e) {
        }

        // Now we'll make sure some things don't
        phraseSuggestion.forceUnigrams(false);
        searchSuggest( "Xor the Got-Jewel",
phraseSuggestion); // Field doesn't produce unigrams but the analyzer does phraseSuggestion.forceUnigrams(true).field("bigram").analyzer("ngram"); searchSuggest( "Xor the Got-Jewel", phraseSuggestion); phraseSuggestion.field("ngram").analyzer("myDefAnalyzer") .addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always")); Suggest suggest = searchSuggest( "Xor the Got-Jewel", phraseSuggestion); // "xorr the god jewel" and and "xorn the god jewel" have identical scores (we are only using unigrams to score), so we tie break by // earlier term (xorn): assertSuggestion(suggest, 0, "simple_phrase", "xorn the god jewel"); phraseSuggestion.analyzer(null); suggest = searchSuggest( "Xor the Got-Jewel", phraseSuggestion); // In this case xorr has a better score than xorn because we set the field back to the default (my_shingle2) analyzer, so the // probability that the term is not in the dictionary but is NOT a misspelling is relatively high in this case compared to the // others that have no n-gram with the other terms in the phrase :) you can set this realWorldErrorLikelyhood assertSuggestion(suggest, 0, "simple_phrase", "xorr the god jewel"); } @Test public void testDifferentShardSize() throws Exception { createIndex("test"); ensureGreen(); indexRandom(true, client().prepareIndex("text", "type1", "1").setSource("field1", "foobar1").setRouting("1"), client().prepareIndex("text", "type1", "2").setSource("field1", "foobar2").setRouting("2"), client().prepareIndex("text", "type1", "3").setSource("field1", "foobar3").setRouting("3")); Suggest suggest = searchSuggest( "foobar", termSuggestion("simple") .size(10).minDocFreq(0).field("field1").suggestMode("always")); ElasticsearchAssertions.assertSuggestionSize(suggest, 0, 3, "simple"); } @Test // see #3469 public void testShardFailures() throws IOException, InterruptedException { CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder() .put(indexSettings()) 
.put("index.analysis.analyzer.suggest.tokenizer", "standard") .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler") .put("index.analysis.filter.shingler.type", "shingle") .put("index.analysis.filter.shingler.min_shingle_size", 2) .put("index.analysis.filter.shingler.max_shingle_size", 5) .put("index.analysis.filter.shingler.output_unigrams", true)); XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type2") .startObject("properties") .startObject("name") .field("type", "multi_field") .startObject("fields") .startObject("name") .field("type", "string") .field("analyzer", "suggest") .endObject() .endObject() .endObject() .endObject() .endObject().endObject(); assertAcked(builder.addMapping("type2", mapping)); ensureGreen(); index("test", "type2", "1", "foo", "bar"); index("test", "type2", "2", "foo", "bar"); index("test", "type2", "3", "foo", "bar"); index("test", "type2", "4", "foo", "bar"); index("test", "type2", "5", "foo", "bar"); index("test", "type2", "1", "name", "Just testing the suggestions api"); index("test", "type2", "2", "name", "An other title about equal length"); // Note that the last document has to have about the same length as the other or cutoff rechecking will remove the useful suggestion. 
refresh(); // When searching on a shard with a non existing mapping, we should fail SearchRequestBuilder request = client().prepareSearch().setSize(0) .setSuggestText("tetsting sugestion") .addSuggestion(phraseSuggestion("did_you_mean").field("fielddoesnotexist").maxErrors(5.0f)); assertThrows(request, SearchPhaseExecutionException.class); // When searching on a shard which does not hold yet any document of an existing type, we should not fail SearchResponse searchResponse = client().prepareSearch().setSize(0) .setSuggestText("tetsting sugestion") .addSuggestion(phraseSuggestion("did_you_mean").field("name").maxErrors(5.0f)) .get(); ElasticsearchAssertions.assertNoFailures(searchResponse); ElasticsearchAssertions.assertSuggestion(searchResponse.getSuggest(), 0, 0, "did_you_mean", "testing suggestions"); } @Test // see #3469 public void testEmptyShards() throws IOException, InterruptedException { XContentBuilder mappingBuilder = XContentFactory.jsonBuilder(). startObject(). startObject("type1"). startObject("properties"). startObject("name"). field("type", "multi_field"). startObject("fields"). startObject("name"). field("type", "string"). field("analyzer", "suggest"). endObject(). endObject(). endObject(). endObject(). endObject(). 
endObject(); assertAcked(prepareCreate("test").setSettings(settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer.suggest.tokenizer", "standard") .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler") .put("index.analysis.filter.shingler.type", "shingle") .put("index.analysis.filter.shingler.min_shingle_size", 2) .put("index.analysis.filter.shingler.max_shingle_size", 5) .put("index.analysis.filter.shingler.output_unigrams", true)).addMapping("type1", mappingBuilder)); ensureGreen(); index("test", "type2", "1", "foo", "bar"); index("test", "type2", "2", "foo", "bar"); index("test", "type1", "1", "name", "Just testing the suggestions api"); index("test", "type1", "2", "name", "An other title about equal length"); refresh(); SearchResponse searchResponse = client().prepareSearch() .setSize(0) .setSuggestText("tetsting sugestion") .addSuggestion(phraseSuggestion("did_you_mean").field("name").maxErrors(5.0f)) .get(); assertNoFailures(searchResponse); assertSuggestion(searchResponse.getSuggest(), 0, 0, "did_you_mean", "testing suggestions"); } /** * Searching for a rare phrase shouldn't provide any suggestions if confidence > 1. This was possible before we rechecked the cutoff * score during the reduce phase. Failures don't occur every time - maybe two out of five tries but we don't repeat it to save time. */ @Test public void testSearchForRarePhrase() throws IOException { // If there isn't enough chaf per shard then shards can become unbalanced, making the cutoff recheck this is testing do more harm then good. 
int chafPerShard = 100; CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer.body.tokenizer", "standard") .putArray("index.analysis.analyzer.body.filter", "lowercase", "my_shingle") .put("index.analysis.filter.my_shingle.type", "shingle") .put("index.analysis.filter.my_shingle.output_unigrams", true) .put("index.analysis.filter.my_shingle.min_shingle_size", 2) .put("index.analysis.filter.my_shingle.max_shingle_size", 2)); XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() .startObject("type1") .startObject("_all") .field("store", "yes") .field("termVector", "with_positions_offsets") .endObject() .startObject("properties") .startObject("body") .field("type", "string") .field("analyzer", "body") .endObject() .endObject() .endObject() .endObject(); assertAcked(builder.addMapping("type1", mapping)); ensureGreen(); NumShards test = getNumShards("test"); List<String> phrases = new ArrayList<>(); Collections.addAll(phrases, "nobel prize", "noble gases", "somethingelse prize", "pride and joy", "notes are fun"); for (int i = 0; i < 8; i++) { phrases.add("noble somethingelse" + i); } for (int i = 0; i < test.numPrimaries * chafPerShard; i++) { phrases.add("chaff" + i); } for (String phrase: phrases) { index("test", "type1", phrase, "body", phrase); } refresh(); Suggest searchSuggest = searchSuggest("nobel prize", phraseSuggestion("simple_phrase") .field("body") .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f)) .confidence(2f) .maxErrors(5f) .size(1)); assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase"); searchSuggest = searchSuggest("noble prize", phraseSuggestion("simple_phrase") .field("body") .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f)) .confidence(2f) .maxErrors(5f) .size(1)); 
assertSuggestion(searchSuggest, 0, 0, "simple_phrase", "nobel prize"); } /** * If the suggester finds tons of options then picking the right one is slow without <<<INSERT SOLUTION HERE>>>. */ @Test @Nightly public void suggestWithManyCandidates() throws InterruptedException, ExecutionException, IOException { CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder() .put(indexSettings()) .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable. .put("index.analysis.analyzer.text.tokenizer", "standard") .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle") .put("index.analysis.filter.my_shingle.type", "shingle") .put("index.analysis.filter.my_shingle.output_unigrams", true) .put("index.analysis.filter.my_shingle.min_shingle_size", 2) .put("index.analysis.filter.my_shingle.max_shingle_size", 3)); XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() .startObject("type1") .startObject("properties") .startObject("title") .field("type", "string") .field("analyzer", "text") .endObject() .endObject() .endObject() .endObject(); assertAcked(builder.addMapping("type1", mapping)); ensureGreen(); ImmutableList.Builder<String> titles = ImmutableList.<String>builder(); // We're going to be searching for: // united states house of representatives elections in washington 2006 // But we need to make sure we generate a ton of suggestions so we add a bunch of candidates. // Many of these candidates are drawn from page names on English Wikipedia. // Tons of different options very near the exact query term titles.add("United States House of Representatives Elections in Washington 1789"); for (int year = 1790; year < 2014; year+= 2) { titles.add("United States House of Representatives Elections in Washington " + year); } // Six of these are near enough to be viable suggestions, just not the top one // But we can't stop there! 
Titles that are just a year are pretty common so lets just add one per year // since 0. Why not? for (int year = 0; year < 2015; year++) { titles.add(Integer.toString(year)); } // That ought to provide more less good candidates for the last term // Now remove or add plural copies of every term we can titles.add("State"); titles.add("Houses of Parliament"); titles.add("Representative Government"); titles.add("Election"); // Now some possessive titles.add("Washington's Birthday"); // And some conjugation titles.add("Unified Modeling Language"); titles.add("Unite Against Fascism"); titles.add("Stated Income Tax"); titles.add("Media organizations housed within colleges"); // And other stuff titles.add("Untied shoelaces"); titles.add("Unit circle"); titles.add("Untitled"); titles.add("Unicef"); titles.add("Unrated"); titles.add("UniRed"); titles.add("Jalan Uniten–Dengkil"); // Highway in Malaysia titles.add("UNITAS"); titles.add("UNITER"); titles.add("Un-Led-Ed"); titles.add("STATS LLC"); titles.add("Staples"); titles.add("Skates"); titles.add("Statues of the Liberators"); titles.add("Staten Island"); titles.add("Statens Museum for Kunst"); titles.add("Hause"); // The last name or the German word, whichever. titles.add("Hose"); titles.add("Hoses"); titles.add("Howse Peak"); titles.add("The Hoose-Gow"); titles.add("Hooser"); titles.add("Electron"); titles.add("Electors"); titles.add("Evictions"); titles.add("Coronal mass ejection"); titles.add("Wasington"); // A film? titles.add("Warrington"); // A town in England titles.add("Waddington"); // Lots of places have this name titles.add("Watlington"); // Ditto titles.add("Waplington"); // Yup, also a town titles.add("Washing of the Spears"); // Book for (char c = 'A'; c <= 'Z'; c++) { // Can't forget lists, glorious lists! titles.add("List of former members of the United States House of Representatives (" + c + ")"); // Lots of people are named Washington <Middle Initial>. LastName titles.add("Washington " + c + ". 
Lastname"); // Lets just add some more to be evil titles.add("United " + c); titles.add("States " + c); titles.add("House " + c); titles.add("Elections " + c); titles.add("2006 " + c); titles.add(c + " United"); titles.add(c + " States"); titles.add(c + " House"); titles.add(c + " Elections"); titles.add(c + " 2006"); } List<IndexRequestBuilder> builders = new ArrayList<>(); for (String title: titles.build()) { builders.add(client().prepareIndex("test", "type1").setSource("title", title)); } indexRandom(true, builders); PhraseSuggestionBuilder suggest = phraseSuggestion("title") .field("title") .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("title") .suggestMode("always") .maxTermFreq(.99f) .size(1000) // Setting a silly high size helps of generate a larger list of candidates for testing. .maxInspections(1000) // This too ) .confidence(0f) .maxErrors(2f) .shardSize(30000) .size(30000); Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", suggest); assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006"); assertSuggestionSize(searchSuggest, 0, 25480, "title"); // Just to prove that we've run through a ton of options suggest.size(1); long start = System.currentTimeMillis(); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", suggest); long total = System.currentTimeMillis() - start; assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006"); // assertThat(total, lessThan(1000L)); // Takes many seconds without fix - just for debugging } @Test public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionException, IOException { CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(settingsBuilder() .put(indexSettings()) .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the 
tests repeatable. .put("index.analysis.analyzer.text.tokenizer", "standard") .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle") .put("index.analysis.filter.my_shingle.type", "shingle") .put("index.analysis.filter.my_shingle.output_unigrams", true) .put("index.analysis.filter.my_shingle.min_shingle_size", 2) .put("index.analysis.filter.my_shingle.max_shingle_size", 3)); XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() .startObject("type1") .startObject("properties") .startObject("title") .field("type", "string") .field("analyzer", "text") .endObject() .endObject() .endObject() .endObject(); assertAcked(builder.addMapping("type1", mapping)); ensureGreen(); ImmutableList.Builder<String> titles = ImmutableList.<String>builder(); titles.add("United States House of Representatives Elections in Washington 2006"); titles.add("United States House of Representatives Elections in Washington 2005"); titles.add("State"); titles.add("Houses of Parliament"); titles.add("Representative Government"); titles.add("Election"); List<IndexRequestBuilder> builders = new ArrayList<>(); for (String title: titles.build()) { builders.add(client().prepareIndex("test", "type1").setSource("title", title)); } indexRandom(true, builders); // suggest without filtering PhraseSuggestionBuilder suggest = phraseSuggestion("title") .field("title") .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("title") .suggestMode("always") .maxTermFreq(.99f) .size(10) .maxInspections(200) ) .confidence(0f) .maxErrors(2f) .shardSize(30000) .size(10); Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", suggest); assertSuggestionSize(searchSuggest, 0, 10, "title"); // suggest with filtering String filterString = XContentFactory.jsonBuilder() .startObject() .startObject("match_phrase") .field("title", "{{suggestion}}") .endObject() .endObject() .string(); PhraseSuggestionBuilder filteredQuerySuggest 
= suggest.collateQuery(filterString); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", filteredQuerySuggest); assertSuggestionSize(searchSuggest, 0, 2, "title"); // filtered suggest with no result (boundary case) searchSuggest = searchSuggest("Elections of Representatives Parliament", filteredQuerySuggest); assertSuggestionSize(searchSuggest, 0, 0, "title"); NumShards numShards = getNumShards("test"); // filtered suggest with bad query String incorrectFilterString = XContentFactory.jsonBuilder() .startObject() .startObject("test") .field("title", "{{suggestion}}") .endObject() .endObject() .string(); PhraseSuggestionBuilder incorrectFilteredSuggest = suggest.collateQuery(incorrectFilterString); try { searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, incorrectFilteredSuggest); fail("Post query error has been swallowed"); } catch(ElasticsearchException e) { // expected } // suggest with filter collation String filterStringAsFilter = XContentFactory.jsonBuilder() .startObject() .startObject("query") .startObject("match_phrase") .field("title", "{{suggestion}}") .endObject() .endObject() .endObject() .string(); PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(null).collateFilter(filterStringAsFilter); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", filteredFilterSuggest); assertSuggestionSize(searchSuggest, 0, 2, "title"); // filtered suggest with bad filter String filterStr = XContentFactory.jsonBuilder() .startObject() .startObject("pprefix") .field("title", "{{suggestion}}") .endObject() .endObject() .string(); PhraseSuggestionBuilder in = suggest.collateQuery(null).collateFilter(filterStr); try { searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, in); fail("Post filter error has been swallowed"); } catch(ElasticsearchException 
e) { //expected } // collate script failure due to no additional params String collateWithParams = XContentFactory.jsonBuilder() .startObject() .startObject("{{query_type}}") .field("{{query_field}}", "{{suggestion}}") .endObject() .endObject() .string(); PhraseSuggestionBuilder phraseSuggestWithNoParams = suggest.collateFilter(null).collateQuery(collateWithParams); try { searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, phraseSuggestWithNoParams); fail("Malformed query (lack of additional params) should fail"); } catch (ElasticsearchException e) { // expected } // collate script with additional params Map<String, Object> params = new HashMap<>(); params.put("query_type", "match_phrase"); params.put("query_field", "title"); PhraseSuggestionBuilder phraseSuggestWithParams = suggest.collateFilter(null).collateQuery(collateWithParams).collateParams(params); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParams); assertSuggestionSize(searchSuggest, 0, 2, "title"); //collate request defining both query/filter should fail PhraseSuggestionBuilder phraseSuggestWithFilterAndQuery = suggest.collateFilter(filterStringAsFilter).collateQuery(filterString); try { searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, phraseSuggestWithFilterAndQuery); fail("expected parse failure, as both filter and query are set in collate"); } catch (ElasticsearchException e) { // expected } // collate query request with prune set to true PhraseSuggestionBuilder phraseSuggestWithParamsAndReturn = suggest.collateFilter(null).collateQuery(collateWithParams).collateParams(params).collatePrune(true); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParamsAndReturn); assertSuggestionSize(searchSuggest, 0, 10, "title"); 
assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2); collateWithParams = XContentFactory.jsonBuilder() .startObject() .startObject("query") .startObject("{{query_type}}") .field("{{query_field}}", "{{suggestion}}") .endObject() .endObject() .endObject().string(); params.clear(); params.put("query_type", "match_phrase"); params.put("query_field", "title"); // collate filter request with prune set to true phraseSuggestWithParamsAndReturn = suggest.collateFilter(collateWithParams).collateQuery(null).collateParams(params).collatePrune(true); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParamsAndReturn); assertSuggestionSize(searchSuggest, 0, 10, "title"); assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2); } protected Suggest searchSuggest(SuggestionBuilder<?>... suggestion) { return searchSuggest(null, suggestion); } protected Suggest searchSuggest(String suggestText, SuggestionBuilder<?>... suggestions) { return searchSuggest(suggestText, 0, suggestions); } protected Suggest searchSuggest(String suggestText, int expectShardsFailed, SuggestionBuilder<?>... 
suggestions) { if (randomBoolean()) { SearchRequestBuilder builder = client().prepareSearch().setSize(0); if (suggestText != null) { builder.setSuggestText(suggestText); } for (SuggestionBuilder<?> suggestion : suggestions) { builder.addSuggestion(suggestion); } SearchResponse actionGet = builder.execute().actionGet(); assertThat(Arrays.toString(actionGet.getShardFailures()), actionGet.getFailedShards(), equalTo(expectShardsFailed)); return actionGet.getSuggest(); } else { SuggestRequestBuilder builder = client().prepareSuggest(); if (suggestText != null) { builder.setSuggestText(suggestText); } for (SuggestionBuilder<?> suggestion : suggestions) { builder.addSuggestion(suggestion); } SuggestResponse actionGet = builder.execute().actionGet(); assertThat(Arrays.toString(actionGet.getShardFailures()), actionGet.getFailedShards(), equalTo(expectShardsFailed)); if (expectShardsFailed > 0) { throw new SearchPhaseExecutionException("suggest", "Suggest execution failed", new ShardSearchFailure[0]); } return actionGet.getSuggest(); } } }
[TEST] use correct index name created for this test
src/test/java/org/elasticsearch/search/suggest/SuggestSearchTests.java
[TEST] use correct index name created for this test
Java
apache-2.0
77f7bec4b39e09ed4543e27c5a650cbde58a6bd9
0
filiphr/camunda-bpm-platform,plexiti/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,jangalinski/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,langfr/camunda-bpm-platform,joansmith/camunda-bpm-platform,holisticon/camunda-bpm-platform,tcrossland/camunda-bpm-platform,jangalinski/camunda-bpm-platform,skjolber/camunda-bpm-platform,holisticon/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,plexiti/camunda-bpm-platform,xasx/camunda-bpm-platform,langfr/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,1and1/camunda-bpm-platform,hupda-edpe/c,camunda/camunda-bpm-platform,1and1/camunda-bpm-platform,menski/camunda-bpm-platform,camunda/camunda-bpm-platform,Sumitdahiya/camunda,skjolber/camunda-bpm-platform,xasx/camunda-bpm-platform,rainerh/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,skjolber/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,menski/camunda-bpm-platform,joansmith/camunda-bpm-platform,filiphr/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,langfr/camunda-bpm-platform,falko/camunda-bpm-platform,rainerh/camunda-bpm-platform,bentrm/camunda-bpm-platform,tcrossland/camunda-bpm-platform,joansmith/camunda-bpm-platform,jangalinski/camunda-bpm-platform,skjolber/camunda-bpm-platform,tcrossland/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,xasx/camunda-bpm-platform,tcrossland/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,nibin/camunda-bpm-platform,filiphr/camunda-bpm-platform,fouasnon/camunda-bpm-platform,filiphr/camunda-bpm-platform,clintmanning/new-empty,holisticon/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,hupda-edpe/c,clintmanning/new-empty,ingorichtsmeier/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,Sumitdahiya/camunda,LuisePufahl/camunda-bpm-platform_batchProcessing,plexiti/camunda-bpm-platform,holisticon/camunda-bpm-platform,skjolber/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,langfr/camunda-bpm-platform,ingorichtsmeie
r/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,falko/camunda-bpm-platform,joansmith/camunda-bpm-platform,camunda/camunda-bpm-platform,1and1/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,hupda-edpe/c,plexiti/camunda-bpm-platform,rainerh/camunda-bpm-platform,xasx/camunda-bpm-platform,Sumitdahiya/camunda,filiphr/camunda-bpm-platform,jangalinski/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,tcrossland/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,menski/camunda-bpm-platform,rainerh/camunda-bpm-platform,menski/camunda-bpm-platform,plexiti/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,jangalinski/camunda-bpm-platform,clintmanning/new-empty,falko/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,Sumitdahiya/camunda,fouasnon/camunda-bpm-platform,jangalinski/camunda-bpm-platform,bentrm/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,Sumitdahiya/camunda,hupda-edpe/c,fouasnon/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,xasx/camunda-bpm-platform,Sumitdahiya/camunda,holisticon/camunda-bpm-platform,hupda-edpe/c,falko/camunda-bpm-platform,holisticon/camunda-bpm-platform,fouasnon/camunda-bpm-platform,fouasnon/camunda-bpm-platform,tcrossland/camunda-bpm-platform,camunda/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,fouasnon/camunda-bpm-platform,joansmith/camunda-bpm-platform,1and1/camunda-bpm-platform,skjolber/camunda-bpm-platform,langfr/camunda-bpm-platform,joansmith/camunda-bpm-platform,plexiti/camunda-bpm-platform,langfr/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,camunda/camunda-bpm-platform,camunda/camunda-bpm-platform,falko/camunda-bpm-platform,filiphr/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,menski/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,ingorichtsmeier/camunda-bpm-platform,nibin/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,nibin/camunda-bpm-platform,bentrm/camunda-bpm-platform,xasx/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,b
entrm/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,rainerh/camunda-bpm-platform,rainerh/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,nibin/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,bentrm/camunda-bpm-platform,bentrm/camunda-bpm-platform,nibin/camunda-bpm-platform,hupda-edpe/c,nibin/camunda-bpm-platform,falko/camunda-bpm-platform
package com.camunda.fox.cycle.connector.signavio; import static org.custommonkey.xmlunit.XMLAssert.assertXpathEvaluatesTo; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Scanner; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.inject.Inject; import org.activiti.engine.impl.bpmn.parser.BpmnParse; import org.activiti.engine.impl.bpmn.parser.BpmnParser; import org.activiti.engine.impl.cfg.BpmnParseFactory; import org.activiti.engine.impl.cfg.DefaultBpmnParseFactory; import org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl; import org.activiti.engine.impl.context.Context; import org.activiti.engine.impl.el.ExpressionManager; import org.activiti.engine.impl.persistence.entity.DeploymentEntity; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.custommonkey.xmlunit.DetailedDiff; import org.custommonkey.xmlunit.Diff; import org.custommonkey.xmlunit.ElementNameAndAttributeQualifier; import org.custommonkey.xmlunit.SimpleNamespaceContext; import org.custommonkey.xmlunit.XMLUnit; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.w3c.dom.Element; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import com.camunda.fox.cycle.connector.ConnectorNode; import com.camunda.fox.cycle.connector.ConnectorNodeType; import com.camunda.fox.cycle.entity.ConnectorConfiguration; import com.camunda.fox.cycle.roundtrip.BpmnProcessModelUtil; import 
com.camunda.fox.cycle.roundtrip.XsltTransformer; import com.camunda.fox.cycle.util.BpmnNamespaceContext; import com.camunda.fox.cycle.util.IoUtil; import com.camunda.fox.cycle.util.XmlUtil; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(locations = { "classpath:/spring/test/signavio-connector-xml-config.xml" }) public class SignavioConnectorIT { /** * Files generated for each test are: * initial-raw -> * technical -> * changed-technical (only when you replace something) -> * before-deploy-technical (only when you deploy to engine) -> * merge-source-technical -> * merge-source-business -> * merge-result -> * actual-raw -> * expected-raw -> * xml-diff */ private static final String INITIAL_RAW_BPMN20_XML = "initial-raw.bpmn"; private static final String TECHNICAL_BPMN20_XML = "technical.bpmn"; private static final String CHANGED_TECHNICAL_BPMN20_XML = "changed-technical.bpmn"; private static final String BEFORE_DEPLOY_TECHNICAL_BPMN20_XML = "before-deploy-technical.bpmn"; private static final String ACTUAL_RAW_BPMN20_XML = "actual-raw.bpmn"; private static final String EXPECTED_RAW_BPMN20_XML = "expected-raw.bpmn"; private static final String XML_DIFF_TXT = "xml-diff.txt"; private static final String MODEL_FOLDER = "models"; private static final String DEBUG_DIR = "target/failsafe-reports"; private static final String TEST_RESOURCES_PATH = "src/test/resources/" + MODEL_FOLDER + "/"; private static final boolean OVERWRITE_EXPECTED_BPMN_FILES = false; private static final String[] testBpmnModels = new String[] { "MyProcess.bpmn", "SimpleProcurementExample.bpmn", "TwitterDemoProcess.bpmn", "TwitterDemoProcess-business-rule-task.bpmn", "TwitterDemoProcess-business-rule-task.dev-friendly.2011-09-01.bpmn" }; private BpmnProcessModelUtil bpmnProcessModelUtil = new BpmnProcessModelUtil(); @Inject private ProcessEngineConfigurationImpl processEngineConfiguration; @Inject private SignavioConnector signavioConnector; @Before public void setUp() throws Exception { 
XMLUnit.setIgnoreWhitespace(true); XMLUnit.setIgnoreAttributeOrder(true); XMLUnit.setXpathNamespaceContext(new SimpleNamespaceContext(new BpmnNamespaceContext().getNamespaces())); ConnectorConfiguration config = this.getSignavioConnector().getConfiguration(); this.getSignavioConnector().init(config); this.getSignavioConnector().login(config.getGlobalUser(), config.getGlobalPassword()); } @After public void tearDown() throws Exception { this.getSignavioConnector().dispose(); } @Test public void testActivitiExtensionsImportExport() throws Exception { final String ACT_ELEMENT_COUNT = "count(//*[namespace-uri() = 'http://activiti.org/bpmn'])"; final String ACT_ATTRIBUTE_COUNT = "count(//@*[namespace-uri() = 'http://activiti.org/bpmn'])"; for (String model : readModels()){ String expectedXml = model; String expectedElementCount = XmlUtil.getXPathResult(ACT_ELEMENT_COUNT, new InputSource(IOUtils.toInputStream(expectedXml, "UTF-8"))); String expectedAttributeCount = XmlUtil.getXPathResult(ACT_ATTRIBUTE_COUNT, new InputSource(IOUtils.toInputStream(expectedXml, "UTF-8"))); ConnectorNode importedNode = this.getSignavioConnector().importContent(this.getSignavioConnector().getPrivateFolder(), expectedXml); InputStream actualXmlInputStream = this.getSignavioConnector().getContent(importedNode); String actualXml = IOUtils.toString(actualXmlInputStream, "UTF-8"); actualXmlInputStream.close(); this.getSignavioConnector().deleteNode(importedNode); assertXpathEvaluatesTo(expectedElementCount, ACT_ELEMENT_COUNT, actualXml); assertXpathEvaluatesTo(expectedAttributeCount, ACT_ATTRIBUTE_COUNT, actualXml); } } @Test public void testImportTechnicalModel() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("MyProcess_executable.bpmn"); } @Test public void testRoundtripWithMessageEvents() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("testRoundtripWithMessageEvents.sgx", "<serviceTask\\sid=\"(test(_\\d*))\"\\sname=\"test\"/>", 
"<serviceTask id=\"testTwitter\" name=\"testTwitter\" activiti:class=\"com.camunda.fox.demo.twitter.TweetContentDelegate\" />"); } @Test public void test_HEMERA_1319() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1319.sgx", "<serviceTask\\sid=\"(Task(_\\d*))\"/>", "<serviceTask id=\"Task_abc\" name=\"Task_abc\" activiti:class=\"com.camunda.fox.demo.pdf.SavePdfDelegate\" />"); } @Test public void test_HEMERA_1610() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1610.sgx", "<serviceTask\\sid=\"(PDF_in_SVN_ablegen(_\\d*))\"\\sname=\"PDF\\sin\\sSVN\\sablegen\"/>", "<serviceTask id=\"PDF_in_SVN_ablegen_abc\" name=\"PDF in SVN ablegen\" activiti:class=\"com.camunda.fox.demo.pdf.SavePdfDelegate\" />"); } @Test public void test_HEMERA_1348_RoundtripWithMessageEventsAdvanced() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1348.sgx"); } @Test public void test_HEMERA_1791_changeEngineProcessId() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1791-Collaboration.bpmn", "<process\\sid=\"(HEMERA-1791-Collaboration_)\"\\sisExecutable=\"true\"\\s*name=\"Process\\sEngine\">", "<process id=\"changed-processid-HEMERA-1791-blub-Collaboration_\" isClosed=\"true\" isExecutable=\"true\" name=\"Process Engine\">"); } @Test public void test_HEMERA_1791_changeEngineProcessName() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1791-Collaboration.bpmn", "<process\\sid=\"HEMERA-1791-Collaboration_\"\\sisExecutable=\"true\"\\s*name=\"Process\\sEngine\">", "<process id=\"HEMERA-1791-Collaboration_\" isExecutable=\"true\" name=\"changed-processname-Process Engine\">"); } @Test public void test_HEMERA_1791_changeEngineProcessIdAndName() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1791-Collaboration.bpmn", 
"<process\\sid=\"(HEMERA-1791-Collaboration_)\"\\sisExecutable=\"true\"\\s*name=\"Process\\sEngine\">", "<process id=\"changed-processid-HEMERA-1791-blub-Collaboration_\" isExecutable=\"true\" name=\"change-processname-Process Engine\">"); } @Test public void test_HEMERA_2219() throws Exception { String roundtripResult = bpmnPoolExtractionRoundtrip("HEMERA-2219.sgx", false, null, null, true, "HEMERA-2219-import.bpmn"); assertXpathEvaluatesTo("2", "count(//bpmn:message)", roundtripResult); assertXpathEvaluatesTo("2", "count(//bpmn:error)", roundtripResult); } @Test public void test_HEMERA_2379() throws Exception { String businessModel = bpmnPoolExtractionRoundtrip("HEMERA-2379.sgx", false, null, null, false, null); // initial x/y-coordinates of messageflow-endpoint (messageflow from non technical pool to technical pool) assertXpathEvaluatesTo("1840.0", "//bpmndi:BPMNEdge[@bpmnElement='messageFlow_24']/omgdi:waypoint[last()]/@x", businessModel); assertXpathEvaluatesTo("852.0", "//bpmndi:BPMNEdge[@bpmnElement='messageFlow_24']/omgdi:waypoint[last()]/@y", businessModel); // initial x/y-coordinates of messageflow-startpoint (messageflow from technical pool to non technical pool) assertXpathEvaluatesTo("1807.0", "//bpmndi:BPMNEdge[@bpmnElement='sid-3630ECEC-DC84-43F3-8373-6EE89E88DA7A']/omgdi:waypoint[1]/@x", businessModel); assertXpathEvaluatesTo("852.0", "//bpmndi:BPMNEdge[@bpmnElement='sid-3630ECEC-DC84-43F3-8373-6EE89E88DA7A']/omgdi:waypoint[1]/@y", businessModel); } @Test public void test_HEMERA_2379_withImport() throws Exception { String roundtripResult = bpmnPoolExtractionRoundtrip("HEMERA-2379.sgx", false, null, null, true, "HEMERA-2379-import.bpmn"); // x/y-coordinates of the messageflow-endpoint after the import of the changed technical model (messageflow from non technical pool to technical pool) assertXpathEvaluatesTo("1840.0", "//bpmndi:BPMNEdge[@bpmnElement='messageFlow_24']/omgdi:waypoint[last()]/@x", roundtripResult); assertXpathEvaluatesTo("757.0", 
"//bpmndi:BPMNEdge[@bpmnElement='messageFlow_24']/omgdi:waypoint[last()]/@y", roundtripResult); // the message flow-target is now the pool-border // x/y-coordinates of the messageflow-startpoint after the import of the changed technical model (messageflow from technical pool to non technical pool) assertXpathEvaluatesTo("1807.0", "//bpmndi:BPMNEdge[@bpmnElement='sid-3630ECEC-DC84-43F3-8373-6EE89E88DA7A']/omgdi:waypoint[1]/@x", roundtripResult); assertXpathEvaluatesTo("757.0", "//bpmndi:BPMNEdge[@bpmnElement='sid-3630ECEC-DC84-43F3-8373-6EE89E88DA7A']/omgdi:waypoint[1]/@y", roundtripResult); // the message flow-source is now the pool-border assertXpathEvaluatesTo("AutomatedAcquisition", "//bpmn:message[1]/@name", roundtripResult); assertXpathEvaluatesTo("ManualAcquisition", "//bpmn:message[2]/@name", roundtripResult); } @Test public void test_HEMERA_1791() throws Exception { // shows also that pool is removed if it's the only participant in collaboration bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1791.bpmn"); } @Test public void test_HEMERA_1791_Collaboration() throws Exception { // shows also that pool is retained if it's no the only participant in collaboration bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1791-Collaboration.bpmn"); } @Test public void test_HEMERA_1820() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1820.bpmn"); } @Test public void test_HEMERA_1821() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1821.bpmn"); } @Test public void test_HEMERA_1942() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1942.sgx"); } @Ignore(value="Disabled because of HEMERA-3125") @Test public void test_ErrorBoundaryEventsInSubProcess() throws Exception { bpmnPoolExtractionRoundtrip("SubprocessBoundaryEventBug.sgx", false, null, null, true, null); } // -------------------------------- CONVIENCE METHODS 
------------------------------ // private void bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy(String filename) throws Exception { bpmnPoolExtractionRoundtripWithEngineDeploy(filename, true, null, null); } private void bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy(String filename, String replaceRegex, String withReplacement) throws Exception { bpmnPoolExtractionRoundtripWithEngineDeploy(filename, true, replaceRegex, withReplacement); } private void bpmnPoolExtractionRoundtripWithEngineDeploy(String filename, boolean devFriendly, String replaceRegex, String withReplacement) throws Exception { bpmnPoolExtractionRoundtrip(filename, devFriendly, replaceRegex, withReplacement, true); } private void bpmnPoolExtractionRoundtrip(String filename, boolean devFriendly, String replaceRegex, String withReplacement, boolean deployToEngine) throws Exception { bpmnPoolExtractionRoundtrip(filename, devFriendly, replaceRegex, withReplacement, deployToEngine, null); } /** * Helper which allows to import a signavioArchive or a bpmn model into * Signavio and does the bpmn roundtrip. 
* * @param filename * the name of the file * @param devFriendly * should the model be made developer friendly * @param replaceRegex * replace the given string * @param withReplacement * with this string * @param deployToEngine * should the exported technical model deployed to engine */ private String bpmnPoolExtractionRoundtrip(String filename, boolean devFriendly, String replaceRegex, String withReplacement, boolean deployToEngine, String importXmlFile) throws Exception { // enable writing of results to files IoUtil.DEBUG = true; IoUtil.DEBUG_DIR = DEBUG_DIR; String initialRawBpmn20Xml = null; // the initial business model String technicalModel = null; String actualRawBpmn20Xml = null; // the business model after completed roundtrip InputStream initialRawBpmn20XmlInputStream = null; // the initial business model InputStream technicalModelInputStream = null; InputStream actualRawBpmn20XmlInputStream = null; // the business model after completed roundtrip ConnectorNode folder = null; try { // create directory String folderName = "Cycle: SignavioConnectorIT.testBpmnPoolExtractionRoundtrip " + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()); folder = this.getSignavioConnector().createNode(this.getSignavioConnector().getPrivateFolder().getId(), folderName, ConnectorNodeType.FOLDER); // upload model if (filename.endsWith(".sgx")) { getSignavioConnector().importSignavioArchive(folder, TEST_RESOURCES_PATH + filename); } else if (filename.endsWith(".bpmn")) { getSignavioConnector().importContent(folder, readModel(filename), filename.substring(0, filename.indexOf("."))); } else { fail("Unable to determine type of file to upload! 
[File=" + filename + "]"); } List<ConnectorNode> models = this.getSignavioConnector().getChildren(folder); for (ConnectorNode node : models) { assertTrue(node instanceof ConnectorNode); ConnectorNode model = (ConnectorNode) node; System.out.println("Testing Pool Extraction Roundtrip with model '" + model.getLabel() + "'..."); initialRawBpmn20XmlInputStream = this.getSignavioConnector().getContent(model); initialRawBpmn20Xml = IOUtils.toString(initialRawBpmn20XmlInputStream, "UTF-8"); IoUtil.closeSilently(initialRawBpmn20XmlInputStream); IoUtil.writeStringToFileIfDebug(initialRawBpmn20Xml, "initial_raw_model", INITIAL_RAW_BPMN20_XML); // ActivitiCompliantBpmn20Provider.writeStringToFileIfDebug(model, initialRawBpmn20Xml, INITIAL_RAW_BPMN20_XML); // export (developer-friendly) BPMN 2.0 XML of Engine Pool if (importXmlFile == null) { if (devFriendly) { String enginePoolId = model.getLabel().replaceFirst("^[^a-zA-Z]", "z").replaceAll("[^a-zA-Z0-9-]", "_").concat("_"); initialRawBpmn20XmlInputStream = IOUtils.toInputStream(initialRawBpmn20Xml, "UTF-8"); ByteArrayOutputStream devFriendlyOutput = XsltTransformer.instance().developerFriendly(initialRawBpmn20XmlInputStream, enginePoolId, true); initialRawBpmn20Xml = devFriendlyOutput.toString("UTF-8"); IoUtil.closeSilently(initialRawBpmn20XmlInputStream); IoUtil.closeSilently(devFriendlyOutput); } initialRawBpmn20XmlInputStream = IOUtils.toInputStream(initialRawBpmn20Xml, "UTF-8"); technicalModelInputStream = this.bpmnProcessModelUtil.extractExecutablePool(initialRawBpmn20XmlInputStream); technicalModel = IOUtils.toString(technicalModelInputStream, "UTF-8"); IoUtil.closeSilently(technicalModelInputStream); IoUtil.closeSilently(initialRawBpmn20XmlInputStream); } else { technicalModel = readModel(importXmlFile); } IoUtil.writeStringToFileIfDebug(technicalModel, "technical_model", TECHNICAL_BPMN20_XML); // do some changes in the model if (replaceRegex != null && !replaceRegex.isEmpty() && withReplacement != null) { 
technicalModel = changeTechnicalModel(replaceRegex, withReplacement, technicalModel); IoUtil.writeStringToFileIfDebug(technicalModel, "changed_technical_model", CHANGED_TECHNICAL_BPMN20_XML); } // test if technical model deploys to engine if (deployToEngine) { IoUtil.writeStringToFileIfDebug(technicalModel, "technical_model_before_deploy", BEFORE_DEPLOY_TECHNICAL_BPMN20_XML); validateActivitiDeployable(technicalModel, filename); } // import Engine Pool back into collaboration actualRawBpmn20Xml = this.bpmnProcessModelUtil.importChangesFromExecutableBpmnModel(technicalModel, initialRawBpmn20Xml); actualRawBpmn20XmlInputStream = IOUtils.toInputStream(actualRawBpmn20Xml, "UTF-8"); this.getSignavioConnector().updateContent(model, actualRawBpmn20XmlInputStream); IoUtil.closeSilently(actualRawBpmn20XmlInputStream); actualRawBpmn20XmlInputStream = this.getSignavioConnector().getContent(model); actualRawBpmn20Xml = IOUtils.toString(actualRawBpmn20XmlInputStream, "UTF-8"); IoUtil.closeSilently(actualRawBpmn20XmlInputStream); IoUtil.writeStringToFileIfDebug(actualRawBpmn20Xml, "actual_model", ACTUAL_RAW_BPMN20_XML); // compare result with a previous result stored in TEST_RESOURCES_PATH assertRoundtripResultCorrect(filename, importXmlFile, model, replaceRegex, withReplacement, actualRawBpmn20Xml); } } finally { if (folder != null) { // delete folder this.getSignavioConnector().deleteNode(folder); } } return actualRawBpmn20Xml; // last result } private void assertRoundtripResultCorrect(String filename, String importXmlFile, ConnectorNode model, String replaceRegex, String withReplacement, String actualRawBpmn20Xml) { String changeId = DigestUtils.md5Hex(replaceRegex + withReplacement); String expectedRawBpmn20XmlFileName = TEST_RESOURCES_PATH + filename + "+" + importXmlFile + "_" + model.getLabel() + "_change-" + changeId + "_" + EXPECTED_RAW_BPMN20_XML; try { File expectedRawBpmn20XmlFile = new File(expectedRawBpmn20XmlFileName); if (OVERWRITE_EXPECTED_BPMN_FILES) { 
FileUtils.writeStringToFile(expectedRawBpmn20XmlFile, actualRawBpmn20Xml, "UTF-8"); fail("The assertions of this test only work if SignavioConnectorIT#OVERWRITE_EXPECTED_BPMN_FILES is set to false."); } String expectedRawBpmn20Xml = FileUtils.readFileToString(expectedRawBpmn20XmlFile, "UTF-8").replace("\r", ""); // remove carriage returns in case the files have been fetched via Git on Windows IoUtil.writeStringToFileIfDebug(expectedRawBpmn20Xml, "expected_model", EXPECTED_RAW_BPMN20_XML); // just for convenient comparison DetailedDiff details = compareSignavioBpmn20Xml(expectedRawBpmn20Xml, actualRawBpmn20Xml); IoUtil.writeStringToFileIfDebug("Comparison:" + "\n" + details.toString(), "comparison_details", XML_DIFF_TXT); // show non-recoverable differences if the assertion fails assertTrue("Comparison:" + "\n" + details.toString().replaceAll("\\[not identical\\] [^\n]+\n", "").replaceAll("\n\n+", "\n"), details.similar()); } catch (IOException e) { throw new RuntimeException("Unable to read or write expected result: " + expectedRawBpmn20XmlFileName, e); } } /** * Compares two BPMN 2.0 XML files exported by Signavio using XMLUnit. * * Note that XMLUnit is configured in {@link SignavioConnectorIT#setUp()}. 
*/ private DetailedDiff compareSignavioBpmn20Xml(String expectedRawBpmn20Xml, String actualRawBpmn20Xml) { try { Diff diff = XMLUnit.compareXML(expectedRawBpmn20Xml, actualRawBpmn20Xml); DetailedDiff details = new DetailedDiff(diff); details.overrideDifferenceListener(new SignavioBpmn20XmlDifferenceListener()); details.overrideElementQualifier(new ElementNameAndAttributeQualifier() { @Override public boolean qualifyForComparison(Element control, Element test) { if (test.getLocalName().equals("outgoing")) { return super.qualifyForComparison(control, test) && control.getTextContent().equals(test.getTextContent()); } return super.qualifyForComparison(control, test); } }); return details; } catch (SAXException e) { throw new RuntimeException("Exception during XML comparison.", e); } catch (IOException e) { throw new RuntimeException("Exception during XML comparison.", e); } } private List<String> readModels() { ArrayList<String> models = new ArrayList<String>(); for (String model : testBpmnModels) { models.add(readModel(model)); } return models; } private String readModel(String modelName) { return new Scanner(SignavioConnectorIT.class.getClass().getResourceAsStream("/" + MODEL_FOLDER + "/" + modelName), "UTF-8").useDelimiter("\\A").next(); } private void validateActivitiDeployable(String bpmnXml, String name) { // parse to validate // TODO: Okay, this needs more serious thinking where we get the engine // from! 
ExpressionManager expressionManager = processEngineConfiguration.getExpressionManager(); BpmnParseFactory bpmnParseFactory = new DefaultBpmnParseFactory(); BpmnParser bpmnParser = new BpmnParser(expressionManager, bpmnParseFactory); Context.setProcessEngineConfiguration(processEngineConfiguration); // Unfortunately the deployment id is requested while parsing, so we have to // set a DeploymentEntity to avoid an NPE DeploymentEntity deployment = new DeploymentEntity(); deployment.setId("VALIDATION_DEPLOYMENT"); // parse to validate BpmnParse parse = bpmnParser.createParse().deployment(deployment).sourceString(bpmnXml).name(name); // parse.execute(); // That's it, now we get an exception if the file is invalid } private String changeTechnicalModel(String replaceRegex, String withReplacement, String technicalModel) { Pattern pattern = Pattern.compile(replaceRegex, Pattern.CASE_INSENSITIVE | Pattern.DOTALL); Matcher matcher = pattern.matcher(technicalModel); assertTrue("Technical model doesn't contain replace string=[" + replaceRegex + "]" + "\n" + technicalModel, matcher.find()); technicalModel = matcher.replaceFirst(withReplacement); // get matched id if (matcher.groupCount() > 0) { String replaceReferencedElementId = matcher.group(1); System.out.println(matcher.group(1)); // adjust all affected ids Pattern replaceIdPattern = Pattern.compile("id=\"(.*?)\"", Pattern.CASE_INSENSITIVE | Pattern.DOTALL); Matcher idMatcher = replaceIdPattern.matcher(withReplacement); if (idMatcher.find()) { System.out.println(idMatcher.group(1)); technicalModel = technicalModel.replaceAll(replaceReferencedElementId, idMatcher.group(1)); } } // if we have specified an activiti attribute in our replacement text, add // activiti namespace if (withReplacement.contains("activiti:")) { Pattern definitionsPattern = Pattern.compile("<definitions", Pattern.CASE_INSENSITIVE | Pattern.DOTALL); assertTrue("Technical model doesn't contain replace string=[" + "<definitions" + "]" + "\n" + 
technicalModel, definitionsPattern.matcher(technicalModel) .find()); technicalModel = definitionsPattern.matcher(technicalModel).replaceFirst("<definitions xmlns:activiti=\"http://activiti.org/bpmn\""); } return technicalModel; } private SignavioConnector getSignavioConnector() { return this.signavioConnector; } }
cycle/src/test/java/com/camunda/fox/cycle/connector/signavio/SignavioConnectorIT.java
package com.camunda.fox.cycle.connector.signavio; import static org.custommonkey.xmlunit.XMLAssert.assertXpathEvaluatesTo; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Scanner; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.inject.Inject; import org.activiti.engine.impl.bpmn.parser.BpmnParse; import org.activiti.engine.impl.bpmn.parser.BpmnParser; import org.activiti.engine.impl.cfg.BpmnParseFactory; import org.activiti.engine.impl.cfg.DefaultBpmnParseFactory; import org.activiti.engine.impl.cfg.ProcessEngineConfigurationImpl; import org.activiti.engine.impl.context.Context; import org.activiti.engine.impl.el.ExpressionManager; import org.activiti.engine.impl.persistence.entity.DeploymentEntity; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.custommonkey.xmlunit.DetailedDiff; import org.custommonkey.xmlunit.Diff; import org.custommonkey.xmlunit.ElementNameAndAttributeQualifier; import org.custommonkey.xmlunit.SimpleNamespaceContext; import org.custommonkey.xmlunit.XMLUnit; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.w3c.dom.Element; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import com.camunda.fox.cycle.connector.ConnectorNode; import com.camunda.fox.cycle.connector.ConnectorNodeType; import com.camunda.fox.cycle.entity.ConnectorConfiguration; import com.camunda.fox.cycle.roundtrip.BpmnProcessModelUtil; import 
com.camunda.fox.cycle.roundtrip.XsltTransformer; import com.camunda.fox.cycle.util.BpmnNamespaceContext; import com.camunda.fox.cycle.util.IoUtil; import com.camunda.fox.cycle.util.XmlUtil; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(locations = { "classpath:/spring/test/signavio-connector-xml-config.xml" }) public class SignavioConnectorIT { /** * Files generated for each test are: * initial-raw -> * technical -> * changed-technical (only when you replace something) -> * before-deploy-technical (only when you deploy to engine) -> * merge-source-technical -> * merge-source-business -> * merge-result -> * actual-raw -> * expected-raw -> * xml-diff */ private static final String INITIAL_RAW_BPMN20_XML = "initial-raw.bpmn"; private static final String TECHNICAL_BPMN20_XML = "technical.bpmn"; private static final String CHANGED_TECHNICAL_BPMN20_XML = "changed-technical.bpmn"; private static final String BEFORE_DEPLOY_TECHNICAL_BPMN20_XML = "before-deploy-technical.bpmn"; private static final String ACTUAL_RAW_BPMN20_XML = "actual-raw.bpmn"; private static final String EXPECTED_RAW_BPMN20_XML = "expected-raw.bpmn"; private static final String XML_DIFF_TXT = "xml-diff.txt"; private static final String MODEL_FOLDER = "models"; private static final String DEBUG_DIR = "target/failsafe-reports"; private static final String TEST_RESOURCES_PATH = "src/test/resources/" + MODEL_FOLDER + "/"; private static final boolean OVERWRITE_EXPECTED_BPMN_FILES = false; private static final String[] testBpmnModels = new String[] { "MyProcess.bpmn", "SimpleProcurementExample.bpmn", "TwitterDemoProcess.bpmn", "TwitterDemoProcess-business-rule-task.bpmn", "TwitterDemoProcess-business-rule-task.dev-friendly.2011-09-01.bpmn" }; private BpmnProcessModelUtil bpmnProcessModelUtil = new BpmnProcessModelUtil(); @Inject private ProcessEngineConfigurationImpl processEngineConfiguration; @Inject private SignavioConnector signavioConnector; @Before public void setUp() throws Exception { 
XMLUnit.setIgnoreWhitespace(true); XMLUnit.setIgnoreAttributeOrder(true); XMLUnit.setXpathNamespaceContext(new SimpleNamespaceContext(new BpmnNamespaceContext().getNamespaces())); ConnectorConfiguration config = this.getSignavioConnector().getConfiguration(); this.getSignavioConnector().init(config); this.getSignavioConnector().login(config.getGlobalUser(), config.getGlobalPassword()); } @After public void tearDown() throws Exception { this.getSignavioConnector().dispose(); } @Test public void testActivitiExtensionsImportExport() throws Exception { final String ACT_ELEMENT_COUNT = "count(//*[namespace-uri() = 'http://activiti.org/bpmn'])"; final String ACT_ATTRIBUTE_COUNT = "count(//@*[namespace-uri() = 'http://activiti.org/bpmn'])"; for (String model : readModels()){ String expectedXml = model; String expectedElementCount = XmlUtil.getXPathResult(ACT_ELEMENT_COUNT, new InputSource(IOUtils.toInputStream(expectedXml, "UTF-8"))); String expectedAttributeCount = XmlUtil.getXPathResult(ACT_ATTRIBUTE_COUNT, new InputSource(IOUtils.toInputStream(expectedXml, "UTF-8"))); ConnectorNode importedNode = this.getSignavioConnector().importContent(this.getSignavioConnector().getPrivateFolder(), expectedXml); InputStream actualXmlInputStream = this.getSignavioConnector().getContent(importedNode); String actualXml = IOUtils.toString(actualXmlInputStream, "UTF-8"); actualXmlInputStream.close(); this.getSignavioConnector().deleteNode(importedNode); assertXpathEvaluatesTo(expectedElementCount, ACT_ELEMENT_COUNT, actualXml); assertXpathEvaluatesTo(expectedAttributeCount, ACT_ATTRIBUTE_COUNT, actualXml); } } @Test public void testImportTechnicalModel() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("MyProcess_executable.bpmn"); } @Test public void testRoundtripWithMessageEvents() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("testRoundtripWithMessageEvents.sgx", "<serviceTask\\sid=\"(test(_\\d*))\"\\sname=\"test\"/>", 
"<serviceTask id=\"testTwitter\" name=\"testTwitter\" activiti:class=\"com.camunda.fox.demo.twitter.TweetContentDelegate\" />"); } @Test public void test_HEMERA_1319() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1319.sgx", "<serviceTask\\sid=\"(Task(_\\d*))\"/>", "<serviceTask id=\"Task_abc\" name=\"Task_abc\" activiti:class=\"com.camunda.fox.demo.pdf.SavePdfDelegate\" />"); } @Test public void test_HEMERA_1610() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1610.sgx", "<serviceTask\\sid=\"(PDF_in_SVN_ablegen(_\\d*))\"\\sname=\"PDF\\sin\\sSVN\\sablegen\"/>", "<serviceTask id=\"PDF_in_SVN_ablegen_abc\" name=\"PDF in SVN ablegen\" activiti:class=\"com.camunda.fox.demo.pdf.SavePdfDelegate\" />"); } @Test public void test_HEMERA_1348_RoundtripWithMessageEventsAdvanced() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1348.sgx"); } @Test public void test_HEMERA_1791_changeEngineProcessId() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1791-Collaboration.bpmn", "<process\\sid=\"(HEMERA-1791-Collaboration_)\"\\sisExecutable=\"true\"\\s*name=\"Process\\sEngine\">", "<process id=\"changed-processid-HEMERA-1791-blub-Collaboration_\" isClosed=\"true\" isExecutable=\"true\" name=\"Process Engine\">"); } @Test public void test_HEMERA_1791_changeEngineProcessName() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1791-Collaboration.bpmn", "<process\\sid=\"HEMERA-1791-Collaboration_\"\\sisExecutable=\"true\"\\s*name=\"Process\\sEngine\">", "<process id=\"HEMERA-1791-Collaboration_\" isExecutable=\"true\" name=\"changed-processname-Process Engine\">"); } @Test public void test_HEMERA_1791_changeEngineProcessIdAndName() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1791-Collaboration.bpmn", 
"<process\\sid=\"(HEMERA-1791-Collaboration_)\"\\sisExecutable=\"true\"\\s*name=\"Process\\sEngine\">", "<process id=\"changed-processid-HEMERA-1791-blub-Collaboration_\" isExecutable=\"true\" name=\"change-processname-Process Engine\">"); } @Test public void test_HEMERA_2219() throws Exception { String roundtripResult = bpmnPoolExtractionRoundtrip("HEMERA-2219.sgx", false, null, null, true, "HEMERA-2219-import.bpmn"); assertXpathEvaluatesTo("2", "count(//bpmn:message)", roundtripResult); assertXpathEvaluatesTo("2", "count(//bpmn:error)", roundtripResult); } @Test public void test_HEMERA_2379() throws Exception { String businessModel = bpmnPoolExtractionRoundtrip("HEMERA-2379.sgx", false, null, null, false, null); // initial x/y-coordinates of messageflow-endpoint (messageflow from non technical pool to technical pool) assertXpathEvaluatesTo("1840.0", "//bpmndi:BPMNEdge[@bpmnElement='messageFlow_24']/omgdi:waypoint[last()]/@x", businessModel); assertXpathEvaluatesTo("852.0", "//bpmndi:BPMNEdge[@bpmnElement='messageFlow_24']/omgdi:waypoint[last()]/@y", businessModel); // initial x/y-coordinates of messageflow-startpoint (messageflow from technical pool to non technical pool) assertXpathEvaluatesTo("1807.0", "//bpmndi:BPMNEdge[@bpmnElement='sid-3630ECEC-DC84-43F3-8373-6EE89E88DA7A']/omgdi:waypoint[1]/@x", businessModel); assertXpathEvaluatesTo("852.0", "//bpmndi:BPMNEdge[@bpmnElement='sid-3630ECEC-DC84-43F3-8373-6EE89E88DA7A']/omgdi:waypoint[1]/@y", businessModel); } @Test public void test_HEMERA_2379_withImport() throws Exception { String roundtripResult = bpmnPoolExtractionRoundtrip("HEMERA-2379.sgx", false, null, null, true, "HEMERA-2379-import.bpmn"); // x/y-coordinates of the messageflow-endpoint after the import of the changed technical model (messageflow from non technical pool to technical pool) assertXpathEvaluatesTo("1840.0", "//bpmndi:BPMNEdge[@bpmnElement='messageFlow_24']/omgdi:waypoint[last()]/@x", roundtripResult); assertXpathEvaluatesTo("757.0", 
"//bpmndi:BPMNEdge[@bpmnElement='messageFlow_24']/omgdi:waypoint[last()]/@y", roundtripResult); // the message flow-target is now the pool-border // x/y-coordinates of the messageflow-startpoint after the import of the changed technical model (messageflow from technical pool to non technical pool) assertXpathEvaluatesTo("1807.0", "//bpmndi:BPMNEdge[@bpmnElement='sid-3630ECEC-DC84-43F3-8373-6EE89E88DA7A']/omgdi:waypoint[1]/@x", roundtripResult); assertXpathEvaluatesTo("757.0", "//bpmndi:BPMNEdge[@bpmnElement='sid-3630ECEC-DC84-43F3-8373-6EE89E88DA7A']/omgdi:waypoint[1]/@y", roundtripResult); // the message flow-source is now the pool-border assertXpathEvaluatesTo("AutomatedAcquisition", "//bpmn:message[1]/@name", roundtripResult); assertXpathEvaluatesTo("ManualAcquisition", "//bpmn:message[2]/@name", roundtripResult); } @Test public void test_HEMERA_1791() throws Exception { // shows also that pool is removed if it's the only participant in collaboration bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1791.bpmn"); } @Test public void test_HEMERA_1791_Collaboration() throws Exception { // shows also that pool is retained if it's no the only participant in collaboration bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1791-Collaboration.bpmn"); } @Test public void test_HEMERA_1820() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1820.bpmn"); } @Test public void test_HEMERA_1821() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1821.bpmn"); } @Test public void test_HEMERA_1942() throws Exception { bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy("HEMERA-1942.sgx"); } @Test public void test_ErrorBoundaryEventsInSubProcess() throws Exception { bpmnPoolExtractionRoundtrip("SubprocessBoundaryEventBug.sgx", false, null, null, true, null); } // -------------------------------- CONVIENCE METHODS ------------------------------ // private void 
bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy(String filename) throws Exception { bpmnPoolExtractionRoundtripWithEngineDeploy(filename, true, null, null); } private void bpmnSimplePoolExtractionRoundtripWithDevFriendlyEngineDeploy(String filename, String replaceRegex, String withReplacement) throws Exception { bpmnPoolExtractionRoundtripWithEngineDeploy(filename, true, replaceRegex, withReplacement); } private void bpmnPoolExtractionRoundtripWithEngineDeploy(String filename, boolean devFriendly, String replaceRegex, String withReplacement) throws Exception { bpmnPoolExtractionRoundtrip(filename, devFriendly, replaceRegex, withReplacement, true); } private void bpmnPoolExtractionRoundtrip(String filename, boolean devFriendly, String replaceRegex, String withReplacement, boolean deployToEngine) throws Exception { bpmnPoolExtractionRoundtrip(filename, devFriendly, replaceRegex, withReplacement, deployToEngine, null); } /** * Helper which allows to import a signavioArchive or a bpmn model into * Signavio and does the bpmn roundtrip. 
* * @param filename * the name of the file * @param devFriendly * should the model be made developer friendly * @param replaceRegex * replace the given string * @param withReplacement * with this string * @param deployToEngine * should the exported technical model deployed to engine */ private String bpmnPoolExtractionRoundtrip(String filename, boolean devFriendly, String replaceRegex, String withReplacement, boolean deployToEngine, String importXmlFile) throws Exception { // enable writing of results to files IoUtil.DEBUG = true; IoUtil.DEBUG_DIR = DEBUG_DIR; String initialRawBpmn20Xml = null; // the initial business model String technicalModel = null; String actualRawBpmn20Xml = null; // the business model after completed roundtrip InputStream initialRawBpmn20XmlInputStream = null; // the initial business model InputStream technicalModelInputStream = null; InputStream actualRawBpmn20XmlInputStream = null; // the business model after completed roundtrip ConnectorNode folder = null; try { // create directory String folderName = "Cycle: SignavioConnectorIT.testBpmnPoolExtractionRoundtrip " + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()); folder = this.getSignavioConnector().createNode(this.getSignavioConnector().getPrivateFolder().getId(), folderName, ConnectorNodeType.FOLDER); // upload model if (filename.endsWith(".sgx")) { getSignavioConnector().importSignavioArchive(folder, TEST_RESOURCES_PATH + filename); } else if (filename.endsWith(".bpmn")) { getSignavioConnector().importContent(folder, readModel(filename), filename.substring(0, filename.indexOf("."))); } else { fail("Unable to determine type of file to upload! 
[File=" + filename + "]"); } List<ConnectorNode> models = this.getSignavioConnector().getChildren(folder); for (ConnectorNode node : models) { assertTrue(node instanceof ConnectorNode); ConnectorNode model = (ConnectorNode) node; System.out.println("Testing Pool Extraction Roundtrip with model '" + model.getLabel() + "'..."); initialRawBpmn20XmlInputStream = this.getSignavioConnector().getContent(model); initialRawBpmn20Xml = IOUtils.toString(initialRawBpmn20XmlInputStream, "UTF-8"); IoUtil.closeSilently(initialRawBpmn20XmlInputStream); IoUtil.writeStringToFileIfDebug(initialRawBpmn20Xml, "initial_raw_model", INITIAL_RAW_BPMN20_XML); // ActivitiCompliantBpmn20Provider.writeStringToFileIfDebug(model, initialRawBpmn20Xml, INITIAL_RAW_BPMN20_XML); // export (developer-friendly) BPMN 2.0 XML of Engine Pool if (importXmlFile == null) { if (devFriendly) { String enginePoolId = model.getLabel().replaceFirst("^[^a-zA-Z]", "z").replaceAll("[^a-zA-Z0-9-]", "_").concat("_"); initialRawBpmn20XmlInputStream = IOUtils.toInputStream(initialRawBpmn20Xml, "UTF-8"); ByteArrayOutputStream devFriendlyOutput = XsltTransformer.instance().developerFriendly(initialRawBpmn20XmlInputStream, enginePoolId, true); initialRawBpmn20Xml = devFriendlyOutput.toString("UTF-8"); IoUtil.closeSilently(initialRawBpmn20XmlInputStream); IoUtil.closeSilently(devFriendlyOutput); } initialRawBpmn20XmlInputStream = IOUtils.toInputStream(initialRawBpmn20Xml, "UTF-8"); technicalModelInputStream = this.bpmnProcessModelUtil.extractExecutablePool(initialRawBpmn20XmlInputStream); technicalModel = IOUtils.toString(technicalModelInputStream, "UTF-8"); IoUtil.closeSilently(technicalModelInputStream); IoUtil.closeSilently(initialRawBpmn20XmlInputStream); } else { technicalModel = readModel(importXmlFile); } IoUtil.writeStringToFileIfDebug(technicalModel, "technical_model", TECHNICAL_BPMN20_XML); // do some changes in the model if (replaceRegex != null && !replaceRegex.isEmpty() && withReplacement != null) { 
technicalModel = changeTechnicalModel(replaceRegex, withReplacement, technicalModel); IoUtil.writeStringToFileIfDebug(technicalModel, "changed_technical_model", CHANGED_TECHNICAL_BPMN20_XML); } // test if technical model deploys to engine if (deployToEngine) { IoUtil.writeStringToFileIfDebug(technicalModel, "technical_model_before_deploy", BEFORE_DEPLOY_TECHNICAL_BPMN20_XML); validateActivitiDeployable(technicalModel, filename); } // import Engine Pool back into collaboration actualRawBpmn20Xml = this.bpmnProcessModelUtil.importChangesFromExecutableBpmnModel(technicalModel, initialRawBpmn20Xml); actualRawBpmn20XmlInputStream = IOUtils.toInputStream(actualRawBpmn20Xml, "UTF-8"); this.getSignavioConnector().updateContent(model, actualRawBpmn20XmlInputStream); IoUtil.closeSilently(actualRawBpmn20XmlInputStream); actualRawBpmn20XmlInputStream = this.getSignavioConnector().getContent(model); actualRawBpmn20Xml = IOUtils.toString(actualRawBpmn20XmlInputStream, "UTF-8"); IoUtil.closeSilently(actualRawBpmn20XmlInputStream); IoUtil.writeStringToFileIfDebug(actualRawBpmn20Xml, "actual_model", ACTUAL_RAW_BPMN20_XML); // compare result with a previous result stored in TEST_RESOURCES_PATH assertRoundtripResultCorrect(filename, importXmlFile, model, replaceRegex, withReplacement, actualRawBpmn20Xml); } } finally { if (folder != null) { // delete folder this.getSignavioConnector().deleteNode(folder); } } return actualRawBpmn20Xml; // last result } private void assertRoundtripResultCorrect(String filename, String importXmlFile, ConnectorNode model, String replaceRegex, String withReplacement, String actualRawBpmn20Xml) { String changeId = DigestUtils.md5Hex(replaceRegex + withReplacement); String expectedRawBpmn20XmlFileName = TEST_RESOURCES_PATH + filename + "+" + importXmlFile + "_" + model.getLabel() + "_change-" + changeId + "_" + EXPECTED_RAW_BPMN20_XML; try { File expectedRawBpmn20XmlFile = new File(expectedRawBpmn20XmlFileName); if (OVERWRITE_EXPECTED_BPMN_FILES) { 
FileUtils.writeStringToFile(expectedRawBpmn20XmlFile, actualRawBpmn20Xml, "UTF-8"); fail("The assertions of this test only work if SignavioConnectorIT#OVERWRITE_EXPECTED_BPMN_FILES is set to false."); } String expectedRawBpmn20Xml = FileUtils.readFileToString(expectedRawBpmn20XmlFile, "UTF-8").replace("\r", ""); // remove carriage returns in case the files have been fetched via Git on Windows IoUtil.writeStringToFileIfDebug(expectedRawBpmn20Xml, "expected_model", EXPECTED_RAW_BPMN20_XML); // just for convenient comparison DetailedDiff details = compareSignavioBpmn20Xml(expectedRawBpmn20Xml, actualRawBpmn20Xml); IoUtil.writeStringToFileIfDebug("Comparison:" + "\n" + details.toString(), "comparison_details", XML_DIFF_TXT); // show non-recoverable differences if the assertion fails assertTrue("Comparison:" + "\n" + details.toString().replaceAll("\\[not identical\\] [^\n]+\n", "").replaceAll("\n\n+", "\n"), details.similar()); } catch (IOException e) { throw new RuntimeException("Unable to read or write expected result: " + expectedRawBpmn20XmlFileName, e); } } /** * Compares two BPMN 2.0 XML files exported by Signavio using XMLUnit. * * Note that XMLUnit is configured in {@link SignavioConnectorIT#setUp()}. 
*/ private DetailedDiff compareSignavioBpmn20Xml(String expectedRawBpmn20Xml, String actualRawBpmn20Xml) { try { Diff diff = XMLUnit.compareXML(expectedRawBpmn20Xml, actualRawBpmn20Xml); DetailedDiff details = new DetailedDiff(diff); details.overrideDifferenceListener(new SignavioBpmn20XmlDifferenceListener()); details.overrideElementQualifier(new ElementNameAndAttributeQualifier() { @Override public boolean qualifyForComparison(Element control, Element test) { if (test.getLocalName().equals("outgoing")) { return super.qualifyForComparison(control, test) && control.getTextContent().equals(test.getTextContent()); } return super.qualifyForComparison(control, test); } }); return details; } catch (SAXException e) { throw new RuntimeException("Exception during XML comparison.", e); } catch (IOException e) { throw new RuntimeException("Exception during XML comparison.", e); } } private List<String> readModels() { ArrayList<String> models = new ArrayList<String>(); for (String model : testBpmnModels) { models.add(readModel(model)); } return models; } private String readModel(String modelName) { return new Scanner(SignavioConnectorIT.class.getClass().getResourceAsStream("/" + MODEL_FOLDER + "/" + modelName), "UTF-8").useDelimiter("\\A").next(); } private void validateActivitiDeployable(String bpmnXml, String name) { // parse to validate // TODO: Okay, this needs more serious thinking where we get the engine // from! 
ExpressionManager expressionManager = processEngineConfiguration.getExpressionManager(); BpmnParseFactory bpmnParseFactory = new DefaultBpmnParseFactory(); BpmnParser bpmnParser = new BpmnParser(expressionManager, bpmnParseFactory); Context.setProcessEngineConfiguration(processEngineConfiguration); // Unfortunately the deployment id is requested while parsing, so we have to // set a DeploymentEntity to avoid an NPE DeploymentEntity deployment = new DeploymentEntity(); deployment.setId("VALIDATION_DEPLOYMENT"); // parse to validate BpmnParse parse = bpmnParser.createParse().deployment(deployment).sourceString(bpmnXml).name(name); // parse.execute(); // That's it, now we get an exception if the file is invalid } private String changeTechnicalModel(String replaceRegex, String withReplacement, String technicalModel) { Pattern pattern = Pattern.compile(replaceRegex, Pattern.CASE_INSENSITIVE | Pattern.DOTALL); Matcher matcher = pattern.matcher(technicalModel); assertTrue("Technical model doesn't contain replace string=[" + replaceRegex + "]" + "\n" + technicalModel, matcher.find()); technicalModel = matcher.replaceFirst(withReplacement); // get matched id if (matcher.groupCount() > 0) { String replaceReferencedElementId = matcher.group(1); System.out.println(matcher.group(1)); // adjust all affected ids Pattern replaceIdPattern = Pattern.compile("id=\"(.*?)\"", Pattern.CASE_INSENSITIVE | Pattern.DOTALL); Matcher idMatcher = replaceIdPattern.matcher(withReplacement); if (idMatcher.find()) { System.out.println(idMatcher.group(1)); technicalModel = technicalModel.replaceAll(replaceReferencedElementId, idMatcher.group(1)); } } // if we have specified an activiti attribute in our replacement text, add // activiti namespace if (withReplacement.contains("activiti:")) { Pattern definitionsPattern = Pattern.compile("<definitions", Pattern.CASE_INSENSITIVE | Pattern.DOTALL); assertTrue("Technical model doesn't contain replace string=[" + "<definitions" + "]" + "\n" + 
technicalModel, definitionsPattern.matcher(technicalModel) .find()); technicalModel = definitionsPattern.matcher(technicalModel).replaceFirst("<definitions xmlns:activiti=\"http://activiti.org/bpmn\""); } return technicalModel; } private SignavioConnector getSignavioConnector() { return this.signavioConnector; } }
Ignore testcase for HEMERA-3125 atm
cycle/src/test/java/com/camunda/fox/cycle/connector/signavio/SignavioConnectorIT.java
Ignore testcase for HEMERA-3125 atm
Java
apache-2.0
0d7e55d5a5fe9dc151658f9b746959026c0e0b4b
0
tmpgit/intellij-community,fitermay/intellij-community,jagguli/intellij-community,jagguli/intellij-community,signed/intellij-community,kdwink/intellij-community,hurricup/intellij-community,caot/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,blademainer/intellij-community,MichaelNedzelsky/intellij-community,FHannes/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,kool79/intellij-community,adedayo/intellij-community,apixandru/intellij-community,izonder/intellij-community,retomerz/intellij-community,blademainer/intellij-community,izonder/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,dslomov/intellij-community,diorcety/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,adedayo/intellij-community,gnuhub/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,ol-loginov/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,michaelgallacher/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,tmpgit/intellij-community,semonte/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,fitermay/intellij-community,ol-loginov/intellij-community,amith01994/intell
ij-community,allotria/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,semonte/intellij-community,nicolargo/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,signed/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,semonte/intellij-community,kool79/intellij-community,dslomov/intellij-community,blademainer/intellij-community,Lekanich/intellij-community,diorcety/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,ibinti/intellij-community,signed/intellij-community,robovm/robovm-studio,supersven/intellij-community,ivan-fedorov/intellij-community,muntasirsyed/intellij-community,blademainer/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,supersven/intellij-community,kool79/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,holmes/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,hurricup/intellij-community,fengbaicanhe/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,blademainer/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,caot/intellij-community,vladmm/intellij-community,asedunov/intellij-community,da1z/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,ibinti/i
ntellij-community,SerCeMan/intellij-community,holmes/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,diorcety/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,Lekanich/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,FHannes/intellij-community,allotria/intellij-community,xfournet/intellij-community,adedayo/intellij-community,ryano144/intellij-community,diorcety/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,diorcety/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,jagguli/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,signed/intellij-community,retomerz/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,wreckJ/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,samthor/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,jagguli/intellij-community,TangHao1987/intellij-community,nicolargo/intellij-community,slisson/intellij-community,da1z/intellij-community,jagguli/intellij-community,slisson/intellij-community,semonte/intellij-community,SerCeMan/intellij-community,tmpgit/int
ellij-community,youdonghai/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,dslomov/intellij-community,samthor/intellij-community,nicolargo/intellij-community,kool79/intellij-community,fitermay/intellij-community,retomerz/intellij-community,signed/intellij-community,da1z/intellij-community,asedunov/intellij-community,adedayo/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,petteyg/intellij-community,TangHao1987/intellij-community,signed/intellij-community,signed/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,kool79/intellij-community,ahb0327/intellij-community,samthor/intellij-community,vladmm/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,hurricup/intellij-community,amith01994/intellij-community,semonte/intellij-community,xfournet/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,ftomassetti/intellij-community,gnuhub/intellij-community,xfournet/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,orekyuu/intellij-community,holmes/intellij-community,orekyuu/intellij-community,robovm/robovm-studio,retomerz/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,izonder/intellij-comm
unity,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,MER-GROUP/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,xfournet/intellij-community,diorcety/intellij-community,slisson/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,ahb0327/intellij-community,da1z/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,caot/intellij-community,suncycheng/intellij-community,dslomov/intellij-community,kool79/intellij-community,caot/intellij-community,adedayo/intellij-community,hurricup/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,petteyg/intellij-community,holmes/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,dslomov/intellij-community,izonder/intellij-community,gnuhub/intellij-community,michaelgallacher/intellij-community,fnouama/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,akosyakov/intellij-community,SerCeMan/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,FHannes/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,nicolargo/intellij-community,fnouama/intellij-community,ThiagoGarciaAlves/intellij-community,ni
colargo/intellij-community,xfournet/intellij-community,da1z/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,petteyg/intellij-community,da1z/intellij-community,robovm/robovm-studio,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,wreckJ/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,muntasirsyed/intellij-community,mglukhikh/intellij-community,clumsy/intellij-community,supersven/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,gnuhub/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,pwoodworth/intellij-community,fnouama/intellij-community,supersven/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,robovm/robovm-studio,adedayo/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,blademainer/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,petteyg/intellij-community,amith01994/intellij-community,dslomov/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,orek
yuu/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,jagguli/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,caot/intellij-community,mglukhikh/intellij-community,signed/intellij-community,semonte/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,youdonghai/intellij-community,ahb0327/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,allotria/intellij-community,wreckJ/intellij-community,holmes/intellij-community,FHannes/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,supersven/intellij-community,diorcety/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,caot/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,ThiagoGarciaAlves/intellij-community,samthor/intellij-community,SerCeMan/intellij-community,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,michaelgal
lacher/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,adedayo/intellij-community,robovm/robovm-studio,petteyg/intellij-community,asedunov/intellij-community,amith01994/intellij-community,jagguli/intellij-community,semonte/intellij-community,gnuhub/intellij-community,samthor/intellij-community,semonte/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,signed/intellij-community,FHannes/intellij-community,supersven/intellij-community,ibinti/intellij-community,semonte/intellij-community,fitermay/intellij-community,slisson/intellij-community,pwoodworth/intellij-community,dslomov/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,allotria/intellij-community,MER-GROUP/intellij-community,fengbaicanhe/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,xfournet/intellij-community,adedayo/intellij-community,caot/intellij-community,lucafavatella/intellij-community,adedayo/intellij-community,vladmm/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,FHannes/in
tellij-community,fitermay/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,asedunov/intellij-community,semonte/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,ryano144/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,retomerz/intellij-community,nicolargo/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,caot/intellij-community,vvv1559/intellij-community,robovm/robovm-studio,jagguli/intellij-community,slisson/intellij-community,suncycheng/intellij-community,akosyakov/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,dslomov/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,izonder/intellij-community,caot/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,signed/intellij-community,fnouama/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,da1z/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,allotria/intellij-community,kool79/intellij-community,signed/intellij-community,xfournet/intellij-community,ryano144/intellij-community,supersven/intellij-community,izonder/intellij-community,xfournet/intellij-community,amith01994/intellij-community,vladmm/intellij-community,holmes/intellij-community,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,ryano144/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,clumsy
/intellij-community,kdwink/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,TangHao1987/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,holmes/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,kool79/intellij-community,ibinti/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,izonder/intellij-community,MER-GROUP/intellij-community,vladmm/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,semonte/intellij-community,slisson/intellij-community,fitermay/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,kool79/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,izonder/intellij-community,kool79/intellij-community,MER-GROUP/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,fnouama/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,holmes/intellij-community,samthor/intellij-community,xfournet/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,kdwink/intellij-community,ahb0327/intellij-community,ibinti/intellij-community,samthor/intellij-community,holmes/intellij-community,TangHao1987/intellij-community,SerCeMan/intellij-community,samthor/intellij-community,muntasirsyed/intellij-community,gnuhub/intellij-community,slisson/intellij-community,michaelgallacher/intellij-community,samthor/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,signed/intellij-community,gnuhub/intellij-community,salgu
arnieri/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,signed/intellij-community,ryano144/intellij-community,apixandru/intellij-community,robovm/robovm-studio,jagguli/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,clumsy/intellij-community,da1z/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,ryano144/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,caot/intellij-community,holmes/intellij-community,supersven/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,vladmm/intellij-community,allotria/intellij-community,petteyg/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,supersven/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zmlx.hg4idea.provider; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.Task; import com.intellij.openapi.project.Project; import com.intellij.openapi.vcs.CheckoutProvider; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.zmlx.hg4idea.HgVcs; import org.zmlx.hg4idea.HgVcsMessages; import org.zmlx.hg4idea.action.HgCommandResultNotifier; import org.zmlx.hg4idea.command.HgCloneCommand; import org.zmlx.hg4idea.execution.HgCommandResult; import org.zmlx.hg4idea.ui.HgCloneDialog; import org.zmlx.hg4idea.util.HgErrorUtil; import java.io.File; import java.util.concurrent.atomic.AtomicReference; public class HgCheckoutProvider implements CheckoutProvider { public void doCheckout(@NotNull final Project project, @Nullable final Listener listener) { FileDocumentManager.getInstance().saveAllDocuments(); final HgCloneDialog dialog = new HgCloneDialog(project); if (!dialog.showAndGet()) { return; } dialog.rememberSettings(); VirtualFile destinationParent = LocalFileSystem.getInstance().findFileByIoFile(new File(dialog.getParentDirectory())); if (destinationParent == null) { return; } final String targetDir = destinationParent.getPath() + File.separator 
+ dialog.getDirectoryName(); final String sourceRepositoryURL = dialog.getSourceRepositoryURL(); final AtomicReference<HgCommandResult> cloneResult = new AtomicReference<HgCommandResult>(); new Task.Backgroundable(project, HgVcsMessages.message("hg4idea.clone.progress", sourceRepositoryURL), true) { @Override public void run(@NotNull ProgressIndicator indicator) { HgCloneCommand clone = new HgCloneCommand(project); clone.setRepositoryURL(sourceRepositoryURL); clone.setDirectory(targetDir); cloneResult.set(clone.execute()); } @Override public void onSuccess() { if (cloneResult.get() == null || HgErrorUtil.hasErrorsInCommandExecution(cloneResult.get())) { new HgCommandResultNotifier(project).notifyError(cloneResult.get(), "Clone failed", "Clone from " + sourceRepositoryURL + " failed."); } else if (listener != null) { listener.directoryCheckedOut(new File(dialog.getParentDirectory(), dialog.getDirectoryName()), HgVcs.getKey()); listener.checkoutCompleted(); } } }.queue(); } public String getVcsName() { return "_Mercurial"; } }
plugins/hg4idea/src/org/zmlx/hg4idea/provider/HgCheckoutProvider.java
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zmlx.hg4idea.provider; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.Task; import com.intellij.openapi.project.Project; import com.intellij.openapi.vcs.CheckoutProvider; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.zmlx.hg4idea.HgVcs; import org.zmlx.hg4idea.HgVcsMessages; import org.zmlx.hg4idea.action.HgCommandResultNotifier; import org.zmlx.hg4idea.command.HgCloneCommand; import org.zmlx.hg4idea.execution.HgCommandResult; import org.zmlx.hg4idea.ui.HgCloneDialog; import org.zmlx.hg4idea.util.HgErrorUtil; import java.io.File; /** * Checkout provider for Mercurial */ public class HgCheckoutProvider implements CheckoutProvider { public void doCheckout(@NotNull final Project project, @Nullable final Listener listener) { ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { FileDocumentManager.getInstance().saveAllDocuments(); } }); final HgCloneDialog dialog = new HgCloneDialog(project); if (!dialog.showAndGet()) { return; } dialog.rememberSettings(); final VirtualFile destinationParent = LocalFileSystem.getInstance().findFileByIoFile(new 
File(dialog.getParentDirectory())); if (destinationParent == null) { return; } final String targetDir = destinationParent.getPath() + File.separator + dialog.getDirectoryName(); final String sourceRepositoryURL = dialog.getSourceRepositoryURL(); new Task.Backgroundable(project, HgVcsMessages.message("hg4idea.clone.progress", sourceRepositoryURL), true) { @Override public void run(@NotNull ProgressIndicator indicator) { // clone HgCloneCommand clone = new HgCloneCommand(project); clone.setRepositoryURL(sourceRepositoryURL); clone.setDirectory(targetDir); // handle result HgCommandResult myCloneResult = clone.execute(); if (myCloneResult == null || HgErrorUtil.hasErrorsInCommandExecution(myCloneResult)) { new HgCommandResultNotifier(project).notifyError(myCloneResult, "Clone failed", "Clone from " + sourceRepositoryURL + " failed."); } else { ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { if (listener != null) { listener.directoryCheckedOut(new File(dialog.getParentDirectory(), dialog.getDirectoryName()), HgVcs.getKey()); listener.checkoutCompleted(); } } }); } } }.queue(); } /** * {@inheritDoc} */ public String getVcsName() { return "_Mercurial"; } }
[hg] cleanup & move result handling to onSuccess from invokeLater
plugins/hg4idea/src/org/zmlx/hg4idea/provider/HgCheckoutProvider.java
[hg] cleanup & move result handling to onSuccess from invokeLater
Java
apache-2.0
6625fca814c9e04d6d2adb0a16a229b3913679df
0
semonte/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,apixandru/intellij-community,asedunov/intellij-community,allotria/intellij-community,vvv1559/intellij-community,semonte/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,allotria/intellij-community,da1z/intellij-community,allotria/intellij-community,youdonghai/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,signed/intellij-community,apixandru/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,apixandru/intellij-community,signed/intellij-community,FHannes/intellij-community,semonte/intellij-community,signed/intellij-community,semonte/intellij-community,suncycheng/intellij-community,signed/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,xfournet/intellij-community,da1z/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,semonte/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,apixandru/intellij-community,FHannes/intellij-community,asedunov/intellij-community,da1z/intellij-community,FHannes/intellij-community,xfournet/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,da1z/intellij-community,vvv1559/intellij-community,da1z/intellij-community,xfournet/intellij-community,ibinti/intellij-communi
ty,idea4bsd/idea4bsd,apixandru/intellij-community,allotria/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,apixandru/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,FHannes/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,apixandru/intellij-community,da1z/intellij-community,suncycheng/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,apixandru/intellij-community,xfournet/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,asedunov/intellij-community,FHannes/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,da1z/intellij-community,xfournet/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,signed/intellij-community,signed/intellij-community,asedunov/intellij-community,FHannes/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,signed/intellij-community,signed/intellij-community,apixandru/intellij-community,semonte/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,FHannes/intellij-communit
y,vvv1559/intellij-community,signed/intellij-community,suncycheng/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,youdonghai/intellij-community,signed/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,allotria/intellij-community,asedunov/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,ibinti/intellij-community,semonte/intellij-community,xfournet/intellij-community,apixandru/intellij-community,semonte/intellij-community,asedunov/intellij-community,signed/intellij-community,xfournet/intellij-community,semonte/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,da1z/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.execution.configurations; import com.intellij.execution.CommandLineUtil; import com.intellij.execution.ExecutionException; import com.intellij.execution.Platform; import com.intellij.execution.process.ProcessNotCreatedException; import com.intellij.ide.IdeBundle; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.UserDataHolder; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.CharsetToolkit; import com.intellij.util.EnvironmentUtil; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.text.CaseInsensitiveStringHashingStrategy; import gnu.trove.THashMap; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; import java.util.*; /** * OS-independent way of executing external processes with complex parameters. * <p> * Main idea of the class is to accept parameters "as-is", just as they should look to an external process, and quote/escape them * as required by the underlying platform - so to run some program with a "parameter with space" all that's needed is * {@code new GeneralCommandLine("some program", "parameter with space").createProcess()}. 
* <p> * Consider the following things when using this class. * * <h3>Working directory</h3> * By default, a current directory of the IDE process is used (usually a "bin/" directory of IDE installation). * If a child process may create files in it, this choice is unwelcome. On the other hand, informational commands (e.g. "git --version") * are safe. When unsure, set it to something neutral - like user's home or a temp directory. * * <h3>Parent Environment</h3> * {@link ParentEnvironmentType Three options here}. * For commands designed from the ground up for typing into a terminal, use {@link ParentEnvironmentType#CONSOLE CONSOLE} * (typical cases: version controls, Node.js and all the stuff around it, Python and Ruby interpreters and utilities, etc). * For GUI apps and CLI tools which aren't primarily intended to be launched by humans, use {@link ParentEnvironmentType#SYSTEM SYSTEM} * (examples: UI builders, browsers, XCode components). For the empty environment, there is {@link ParentEnvironmentType#NONE NONE}. * According to an extensive research conducted by British scientists (tm) on a diverse population of both wild and domesticated tools * (no one was harmed), most of them are either insensitive to an environment or fall into the first category, * thus backing up the choice of CONSOLE as the default value. * * <h3>Encoding/Charset</h3> * The {@link #getCharset()} method is used by classes like {@link com.intellij.execution.process.OSProcessHandler OSProcessHandler} * or {@link com.intellij.execution.util.ExecUtil ExecUtil} to decode bytes of a child's output stream. For proper conversion, * the same value should be used on another side of the pipe. Chances are you don't have to mess with the setting - * because a platform-dependent guessing behind {@link Charset#defaultCharset()} is used by default and a child process * may happen to use a similar heuristic. * If the above automagic fails or more control is needed, the charset may be set explicitly. 
Again, do not forget the other side - * call {@code addParameter("-Dfile.encoding=...")} for Java-based tools, or use {@code withEnvironment("HGENCODING", "...")} * for Mercurial, etc. * * @see com.intellij.execution.util.ExecUtil * @see com.intellij.execution.process.OSProcessHandler */ public class GeneralCommandLine implements UserDataHolder { private static final Logger LOG = Logger.getInstance("#com.intellij.execution.configurations.GeneralCommandLine"); /** * Determines the scope of a parent environment passed to a child process. * <p> * {@code NONE} means a child process will receive an empty environment. <br/> * {@code SYSTEM} will provide it with the same environment as an IDE. <br/> * {@code CONSOLE} provides the child with a similar environment as if it was launched from, well, a console. * On OS X, a console environment is simulated (see {@link EnvironmentUtil#getEnvironmentMap()} for reasons it's needed * and details on how it works). On Windows and Unix hosts, this option is no different from {@code SYSTEM} * since there is no drastic distinction in environment between GUI and console apps. */ public enum ParentEnvironmentType {NONE, SYSTEM, CONSOLE} private String myExePath; private File myWorkDirectory; private final Map<String, String> myEnvParams = new MyTHashMap(); private ParentEnvironmentType myParentEnvironmentType = ParentEnvironmentType.CONSOLE; private final ParametersList myProgramParams = new ParametersList(); private Charset myCharset = CharsetToolkit.getDefaultSystemCharset(); private boolean myRedirectErrorStream = false; private Map<Object, Object> myUserData; public GeneralCommandLine() { } public GeneralCommandLine(@NotNull String... 
command) { this(Arrays.asList(command)); } public GeneralCommandLine(@NotNull List<String> command) { int size = command.size(); if (size > 0) { setExePath(command.get(0)); if (size > 1) { addParameters(command.subList(1, size)); } } } @NotNull public String getExePath() { return myExePath; } @NotNull public GeneralCommandLine withExePath(@NotNull String exePath) { myExePath = exePath.trim(); return this; } public void setExePath(@NotNull String exePath) { withExePath(exePath); } public File getWorkDirectory() { return myWorkDirectory; } @NotNull public GeneralCommandLine withWorkDirectory(@Nullable String path) { return withWorkDirectory(path != null ? new File(path) : null); } @NotNull public GeneralCommandLine withWorkDirectory(@Nullable File workDirectory) { myWorkDirectory = workDirectory; return this; } public void setWorkDirectory(@Nullable String path) { withWorkDirectory(path); } public void setWorkDirectory(@Nullable File workDirectory) { withWorkDirectory(workDirectory); } /** * Note: the map returned is forgiving to passing null values into putAll(). */ @NotNull public Map<String, String> getEnvironment() { return myEnvParams; } @NotNull public GeneralCommandLine withEnvironment(@Nullable Map<String, String> environment) { if (environment != null) { getEnvironment().putAll(environment); } return this; } @NotNull public GeneralCommandLine withEnvironment(@NotNull String key, @NotNull String value) { getEnvironment().put(key, value); return this; } public boolean isPassParentEnvironment() { return myParentEnvironmentType != ParentEnvironmentType.NONE; } /** @deprecated use {@link #withParentEnvironmentType(ParentEnvironmentType)} */ public GeneralCommandLine withPassParentEnvironment(boolean passParentEnvironment) { return withParentEnvironmentType(passParentEnvironment ? 
ParentEnvironmentType.CONSOLE : ParentEnvironmentType.NONE); } /** @deprecated use {@link #withParentEnvironmentType(ParentEnvironmentType)} */ public void setPassParentEnvironment(boolean passParentEnvironment) { withParentEnvironmentType(passParentEnvironment ? ParentEnvironmentType.CONSOLE : ParentEnvironmentType.NONE); } @NotNull public ParentEnvironmentType getParentEnvironmentType() { return myParentEnvironmentType; } @NotNull public GeneralCommandLine withParentEnvironmentType(@NotNull ParentEnvironmentType type) { myParentEnvironmentType = type; return this; } /** * Returns an environment that will be inherited by a child process. * @see #getEffectiveEnvironment() */ @NotNull public Map<String, String> getParentEnvironment() { switch (myParentEnvironmentType) { case SYSTEM: return System.getenv(); case CONSOLE: return EnvironmentUtil.getEnvironmentMap(); default: return Collections.emptyMap(); } } /** * Returns an environment as seen by a child process, * that is the {@link #getEnvironment() environment} merged with the {@link #getParentEnvironment() parent} one. */ @NotNull public Map<String, String> getEffectiveEnvironment() { MyTHashMap env = new MyTHashMap(); setupEnvironment(env); return env; } public void addParameters(@NotNull String... parameters) { withParameters(parameters); } public void addParameters(@NotNull List<String> parameters) { withParameters(parameters); } @NotNull public GeneralCommandLine withParameters(@NotNull String... 
parameters) { for (String parameter : parameters) addParameter(parameter); return this; } @NotNull public GeneralCommandLine withParameters(@NotNull List<String> parameters) { for (String parameter : parameters) addParameter(parameter); return this; } public void addParameter(@NotNull String parameter) { myProgramParams.add(parameter); } @NotNull public ParametersList getParametersList() { return myProgramParams; } @NotNull public Charset getCharset() { return myCharset; } @NotNull public GeneralCommandLine withCharset(@NotNull Charset charset) { myCharset = charset; return this; } public void setCharset(@NotNull Charset charset) { withCharset(charset); } public boolean isRedirectErrorStream() { return myRedirectErrorStream; } @NotNull public GeneralCommandLine withRedirectErrorStream(boolean redirectErrorStream) { myRedirectErrorStream = redirectErrorStream; return this; } public void setRedirectErrorStream(boolean redirectErrorStream) { withRedirectErrorStream(redirectErrorStream); } /** * Returns string representation of this command line.<br/> * Warning: resulting string is not OS-dependent - <b>do not</b> use it for executing this command line. * * @return single-string representation of this command line. */ @NotNull public String getCommandLineString() { return getCommandLineString(null); } /** * Returns string representation of this command line.<br/> * Warning: resulting string is not OS-dependent - <b>do not</b> use it for executing this command line. * * @param exeName use this executable name instead of given by {@link #setExePath(String)} * @return single-string representation of this command line. 
*/ @NotNull public String getCommandLineString(@Nullable String exeName) { return ParametersList.join(getCommandLineList(exeName)); } @NotNull public List<String> getCommandLineList(@Nullable String exeName) { List<String> commands = new ArrayList<>(); if (exeName != null) { commands.add(exeName); } else if (myExePath != null) { commands.add(myExePath); } else { commands.add("<null>"); } commands.addAll(myProgramParams.getList()); return commands; } /** * Prepares command (quotes and escapes all arguments) and returns it as a newline-separated list * (suitable e.g. for passing in an environment variable). * * @param platform a target platform * @return command as a newline-separated list. */ @NotNull public String getPreparedCommandLine(@NotNull Platform platform) { String exePath = myExePath != null ? myExePath : ""; return StringUtil.join(CommandLineUtil.toCommandLine(exePath, myProgramParams.getList(), platform), "\n"); } @NotNull public Process createProcess() throws ExecutionException { if (LOG.isDebugEnabled()) { LOG.debug("Executing [" + getCommandLineString() + "]"); LOG.debug(" environment: " + myEnvParams + " (+" + myParentEnvironmentType + ")"); LOG.debug(" charset: " + myCharset); } List<String> commands; try { checkWorkingDirectory(); if (StringUtil.isEmptyOrSpaces(myExePath)) { throw new ExecutionException(IdeBundle.message("run.configuration.error.executable.not.specified")); } commands = CommandLineUtil.toCommandLine(myExePath, myProgramParams.getList()); } catch (ExecutionException e) { LOG.info(e); throw e; } try { return startProcess(commands); } catch (IOException e) { LOG.info(e); throw new ProcessNotCreatedException(e.getMessage(), e, this); } } @NotNull protected Process startProcess(@NotNull List<String> commands) throws IOException { ProcessBuilder builder = new ProcessBuilder(commands); setupEnvironment(builder.environment()); builder.directory(myWorkDirectory); builder.redirectErrorStream(myRedirectErrorStream); return builder.start(); } 
private void checkWorkingDirectory() throws ExecutionException { if (myWorkDirectory == null) { return; } if (!myWorkDirectory.exists()) { throw new ExecutionException(IdeBundle.message("run.configuration.error.working.directory.does.not.exist", myWorkDirectory)); } if (!myWorkDirectory.isDirectory()) { throw new ExecutionException(IdeBundle.message("run.configuration.error.working.directory.not.directory", myWorkDirectory)); } } protected void setupEnvironment(@NotNull Map<String, String> environment) { environment.clear(); if (myParentEnvironmentType != ParentEnvironmentType.NONE) { environment.putAll(getParentEnvironment()); } if (SystemInfo.isUnix) { File workDirectory = getWorkDirectory(); if (workDirectory != null) { environment.put("PWD", FileUtil.toSystemDependentName(workDirectory.getAbsolutePath())); } } if (!myEnvParams.isEmpty()) { if (SystemInfo.isWindows) { THashMap<String, String> envVars = new THashMap<>(CaseInsensitiveStringHashingStrategy.INSTANCE); envVars.putAll(environment); envVars.putAll(myEnvParams); environment.clear(); environment.putAll(envVars); } else { environment.putAll(myEnvParams); } } } /** * Normally, double quotes in parameters are escaped so they arrive to a called program as-is. * But some commands (e.g. {@code 'cmd /c start "title" ...'}) should get they quotes non-escaped. * Wrapping a parameter by this method (instead of using quotes) will do exactly this. 
* * @see com.intellij.execution.util.ExecUtil#getTerminalCommand(String, String) */ @NotNull public static String inescapableQuote(@NotNull String parameter) { return CommandLineUtil.specialQuote(parameter); } @Override public String toString() { return myExePath + " " + myProgramParams; } @Nullable @Override public <T> T getUserData(@NotNull Key<T> key) { if (myUserData != null) { @SuppressWarnings({"UnnecessaryLocalVariable", "unchecked"}) T t = (T)myUserData.get(key); return t; } return null; } @Override public <T> void putUserData(@NotNull Key<T> key, @Nullable T value) { if (myUserData == null) { if (value == null) return; myUserData = ContainerUtil.newHashMap(); } myUserData.put(key, value); } private static class MyTHashMap extends THashMap<String, String> { private MyTHashMap() { super(SystemInfo.isWindows ? CaseInsensitiveStringHashingStrategy.INSTANCE : ContainerUtil.canonicalStrategy()); } @Override public String put(String key, String value) { if (key == null || value == null) { LOG.error(new Exception("Nulls are not allowed")); return null; } if (key.isEmpty()) { // Windows: passing an environment variable with empty name causes "CreateProcess error=87, The parameter is incorrect" LOG.warn("Skipping environment variable with empty name, value: " + value); return null; } return super.put(key, value); } @Override public void putAll(Map<? extends String, ? extends String> map) { if (map != null) { super.putAll(map); } } } }
platform/platform-api/src/com/intellij/execution/configurations/GeneralCommandLine.java
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.execution.configurations; import com.intellij.execution.CommandLineUtil; import com.intellij.execution.ExecutionException; import com.intellij.execution.Platform; import com.intellij.execution.process.ProcessNotCreatedException; import com.intellij.ide.IdeBundle; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.UserDataHolder; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.CharsetToolkit; import com.intellij.util.EnvironmentUtil; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.text.CaseInsensitiveStringHashingStrategy; import gnu.trove.THashMap; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; import java.util.*; /** * OS-independent way of executing external processes with complex parameters. * <p> * Main idea of the class is to accept parameters "as-is", just as they should look to an external process, and quote/escape them * as required by the underlying platform - so to run some program with a "parameter with space" all that's needed is * {@code new GeneralCommandLine("some program", "parameter with space").createProcess()}. 
* <p> * Consider the following things when using this class. * * <h3>Working directory</h3> * By default, a current directory of the IDE process is used (usually a "bin/" directory of IDE installation). * If a child process may create files in it, this choice is unwelcome. On the other hand, informational commands (e.g. "git --version") * are safe. When unsure, set it to something neutral - like user's home or a temp directory. * * <h3>Parent Environment</h3> * {@link ParentEnvironmentType Three options here}. * For commands designed from the ground up for typing into a terminal, use {@link ParentEnvironmentType#CONSOLE CONSOLE} * (typical cases: version controls, Node.js and all the stuff around it, Python and Ruby interpreters and utilities, etc). * For GUI apps and CLI tools which aren't primarily intended to be launched by humans, use {@link ParentEnvironmentType#SYSTEM SYSTEM} * (examples: UI builders, browsers, XCode components). For the empty environment, there is {@link ParentEnvironmentType#NONE NONE}. * According to an extensive research conducted by British scientists (tm) on a diverse population of both wild and domesticated tools * (no one was harmed), most of them are either insensitive to an environment or fall into the first category, * thus backing up the choice of CONSOLE as the default value. * * <h3>Encoding/Charset</h3> * The {@link #getCharset()} method is used by classes like {@link com.intellij.execution.process.OSProcessHandler OSProcessHandler} * or {@link com.intellij.execution.util.ExecUtil ExecUtil} to decode bytes of a child's output stream. For proper conversion, * the same value should be used on another side of the pipe. Chances are you don't have to mess with the setting - * because a platform-dependent guessing behind {@link Charset#defaultCharset()} is used by default and a child process * may happen to use a similar heuristic. * If the above automagic fails or more control is needed, the charset may be set explicitly. 
Again, do not forget the other side - * call {@code addParameter("-Dfile.encoding=...")} for Java-based tools, or use {@code withEnvironment("HGENCODING", "...")} * for Mercurial, etc. * * @see com.intellij.execution.util.ExecUtil * @see com.intellij.execution.process.OSProcessHandler */ public class GeneralCommandLine implements UserDataHolder { private static final Logger LOG = Logger.getInstance("#com.intellij.execution.configurations.GeneralCommandLine"); /** * Determines the scope of a parent environment passed to a child process. * <p> * {@code NONE} means a child process will receive an empty environment. <br/> * {@code SYSTEM} will provide it with the same environment as an IDE. <br/> * {@code CONSOLE} provides the child with a similar environment as if it was launched from, well, a console. * On OS X, a console environment is simulated (see {@link EnvironmentUtil#getEnvironmentMap()} for reasons it's needed * and details on how it works). On Windows and Unix hosts, this option is no different from {@code SYSTEM} * since there is no drastic distinction in environment between GUI and console apps. */ public enum ParentEnvironmentType {NONE, SYSTEM, CONSOLE} private String myExePath; private File myWorkDirectory; private final Map<String, String> myEnvParams = new MyTHashMap(); private ParentEnvironmentType myParentEnvironmentType = ParentEnvironmentType.CONSOLE; private final ParametersList myProgramParams = new ParametersList(); private Charset myCharset = CharsetToolkit.getDefaultSystemCharset(); private boolean myRedirectErrorStream = false; private Map<Object, Object> myUserData; public GeneralCommandLine() { } public GeneralCommandLine(@NotNull String... 
command) { this(Arrays.asList(command)); } public GeneralCommandLine(@NotNull List<String> command) { int size = command.size(); if (size > 0) { setExePath(command.get(0)); if (size > 1) { addParameters(command.subList(1, size)); } } } @NotNull public String getExePath() { return myExePath; } @NotNull public GeneralCommandLine withExePath(@NotNull String exePath) { myExePath = exePath.trim(); return this; } public void setExePath(@NotNull String exePath) { withExePath(exePath); } public File getWorkDirectory() { return myWorkDirectory; } @NotNull public GeneralCommandLine withWorkDirectory(@Nullable String path) { return withWorkDirectory(path != null ? new File(path) : null); } @NotNull public GeneralCommandLine withWorkDirectory(@Nullable File workDirectory) { myWorkDirectory = workDirectory; return this; } public void setWorkDirectory(@Nullable String path) { withWorkDirectory(path); } public void setWorkDirectory(@Nullable File workDirectory) { withWorkDirectory(workDirectory); } /** * Note: the map returned is forgiving to passing null values into putAll(). */ @NotNull public Map<String, String> getEnvironment() { return myEnvParams; } @NotNull public GeneralCommandLine withEnvironment(@Nullable Map<String, String> environment) { if (environment != null) { getEnvironment().putAll(environment); } return this; } @NotNull public GeneralCommandLine withEnvironment(@NotNull String key, @NotNull String value) { getEnvironment().put(key, value); return this; } public boolean isPassParentEnvironment() { return myParentEnvironmentType != ParentEnvironmentType.NONE; } /** @deprecated use {@link #withParentEnvironmentType(ParentEnvironmentType)} */ public GeneralCommandLine withPassParentEnvironment(boolean passParentEnvironment) { return withParentEnvironmentType(passParentEnvironment ? 
ParentEnvironmentType.CONSOLE : ParentEnvironmentType.NONE); } /** @deprecated use {@link #withParentEnvironmentType(ParentEnvironmentType)} */ public void setPassParentEnvironment(boolean passParentEnvironment) { withParentEnvironmentType(passParentEnvironment ? ParentEnvironmentType.CONSOLE : ParentEnvironmentType.NONE); } @NotNull public ParentEnvironmentType getParentEnvironmentType() { return myParentEnvironmentType; } @NotNull public GeneralCommandLine withParentEnvironmentType(@NotNull ParentEnvironmentType type) { myParentEnvironmentType = type; return this; } /** * Returns an environment that will be passed to a child process. */ @NotNull public Map<String, String> getParentEnvironment() { switch (myParentEnvironmentType) { case SYSTEM: return System.getenv(); case CONSOLE: return EnvironmentUtil.getEnvironmentMap(); default: return Collections.emptyMap(); } } public void addParameters(@NotNull String... parameters) { withParameters(parameters); } public void addParameters(@NotNull List<String> parameters) { withParameters(parameters); } @NotNull public GeneralCommandLine withParameters(@NotNull String... 
parameters) { for (String parameter : parameters) addParameter(parameter); return this; } @NotNull public GeneralCommandLine withParameters(@NotNull List<String> parameters) { for (String parameter : parameters) addParameter(parameter); return this; } public void addParameter(@NotNull String parameter) { myProgramParams.add(parameter); } @NotNull public ParametersList getParametersList() { return myProgramParams; } @NotNull public Charset getCharset() { return myCharset; } @NotNull public GeneralCommandLine withCharset(@NotNull Charset charset) { myCharset = charset; return this; } public void setCharset(@NotNull Charset charset) { withCharset(charset); } public boolean isRedirectErrorStream() { return myRedirectErrorStream; } @NotNull public GeneralCommandLine withRedirectErrorStream(boolean redirectErrorStream) { myRedirectErrorStream = redirectErrorStream; return this; } public void setRedirectErrorStream(boolean redirectErrorStream) { withRedirectErrorStream(redirectErrorStream); } /** * Returns string representation of this command line.<br/> * Warning: resulting string is not OS-dependent - <b>do not</b> use it for executing this command line. * * @return single-string representation of this command line. */ @NotNull public String getCommandLineString() { return getCommandLineString(null); } /** * Returns string representation of this command line.<br/> * Warning: resulting string is not OS-dependent - <b>do not</b> use it for executing this command line. * * @param exeName use this executable name instead of given by {@link #setExePath(String)} * @return single-string representation of this command line. 
*/ @NotNull public String getCommandLineString(@Nullable String exeName) { return ParametersList.join(getCommandLineList(exeName)); } @NotNull public List<String> getCommandLineList(@Nullable String exeName) { List<String> commands = new ArrayList<>(); if (exeName != null) { commands.add(exeName); } else if (myExePath != null) { commands.add(myExePath); } else { commands.add("<null>"); } commands.addAll(myProgramParams.getList()); return commands; } /** * Prepares command (quotes and escapes all arguments) and returns it as a newline-separated list * (suitable e.g. for passing in an environment variable). * * @param platform a target platform * @return command as a newline-separated list. */ @NotNull public String getPreparedCommandLine(@NotNull Platform platform) { String exePath = myExePath != null ? myExePath : ""; return StringUtil.join(CommandLineUtil.toCommandLine(exePath, myProgramParams.getList(), platform), "\n"); } @NotNull public Process createProcess() throws ExecutionException { if (LOG.isDebugEnabled()) { LOG.debug("Executing [" + getCommandLineString() + "]"); LOG.debug(" environment: " + myEnvParams + " (+" + myParentEnvironmentType + ")"); LOG.debug(" charset: " + myCharset); } List<String> commands; try { checkWorkingDirectory(); if (StringUtil.isEmptyOrSpaces(myExePath)) { throw new ExecutionException(IdeBundle.message("run.configuration.error.executable.not.specified")); } commands = CommandLineUtil.toCommandLine(myExePath, myProgramParams.getList()); } catch (ExecutionException e) { LOG.info(e); throw e; } try { return startProcess(commands); } catch (IOException e) { LOG.info(e); throw new ProcessNotCreatedException(e.getMessage(), e, this); } } @NotNull protected Process startProcess(@NotNull List<String> commands) throws IOException { ProcessBuilder builder = new ProcessBuilder(commands); setupEnvironment(builder.environment()); builder.directory(myWorkDirectory); builder.redirectErrorStream(myRedirectErrorStream); return builder.start(); } 
private void checkWorkingDirectory() throws ExecutionException { if (myWorkDirectory == null) { return; } if (!myWorkDirectory.exists()) { throw new ExecutionException(IdeBundle.message("run.configuration.error.working.directory.does.not.exist", myWorkDirectory)); } if (!myWorkDirectory.isDirectory()) { throw new ExecutionException(IdeBundle.message("run.configuration.error.working.directory.not.directory", myWorkDirectory)); } } protected void setupEnvironment(@NotNull Map<String, String> environment) { environment.clear(); if (myParentEnvironmentType != ParentEnvironmentType.NONE) { environment.putAll(getParentEnvironment()); } if (SystemInfo.isUnix) { File workDirectory = getWorkDirectory(); if (workDirectory != null) { environment.put("PWD", FileUtil.toSystemDependentName(workDirectory.getAbsolutePath())); } } if (!myEnvParams.isEmpty()) { if (SystemInfo.isWindows) { THashMap<String, String> envVars = new THashMap<>(CaseInsensitiveStringHashingStrategy.INSTANCE); envVars.putAll(environment); envVars.putAll(myEnvParams); environment.clear(); environment.putAll(envVars); } else { environment.putAll(myEnvParams); } } } /** * Normally, double quotes in parameters are escaped so they arrive to a called program as-is. * But some commands (e.g. {@code 'cmd /c start "title" ...'}) should get they quotes non-escaped. * Wrapping a parameter by this method (instead of using quotes) will do exactly this. 
* * @see com.intellij.execution.util.ExecUtil#getTerminalCommand(String, String) */ @NotNull public static String inescapableQuote(@NotNull String parameter) { return CommandLineUtil.specialQuote(parameter); } @Override public String toString() { return myExePath + " " + myProgramParams; } @Nullable @Override public <T> T getUserData(@NotNull Key<T> key) { if (myUserData != null) { @SuppressWarnings({"UnnecessaryLocalVariable", "unchecked"}) T t = (T)myUserData.get(key); return t; } return null; } @Override public <T> void putUserData(@NotNull Key<T> key, @Nullable T value) { if (myUserData == null) { if (value == null) return; myUserData = ContainerUtil.newHashMap(); } myUserData.put(key, value); } private static class MyTHashMap extends THashMap<String, String> { private MyTHashMap() { super(SystemInfo.isWindows ? CaseInsensitiveStringHashingStrategy.INSTANCE : ContainerUtil.canonicalStrategy()); } @Override public String put(String key, String value) { if (key == null || value == null) { LOG.error(new Exception("Nulls are not allowed")); return null; } if (key.isEmpty()) { // Windows: passing an environment variable with empty name causes "CreateProcess error=87, The parameter is incorrect" LOG.warn("Skipping environment variable with empty name, value: " + value); return null; } return super.put(key, value); } @Override public void putAll(Map<? extends String, ? extends String> map) { if (map != null) { super.putAll(map); } } } }
Add GeneralCommandLine.getEffectiveEnvironment()
platform/platform-api/src/com/intellij/execution/configurations/GeneralCommandLine.java
Add GeneralCommandLine.getEffectiveEnvironment()
Java
apache-2.0
fc3bb1444a0d4a48a955375c0658fd4f188fb509
0
crate/crate,crate/crate,puneetjaiswal/crate,sunny256/crate,aslanbekirov/crate,aslanbekirov/crate,sunny256/crate,aslanbekirov/crate,gmrodrigues/crate,EvilMcJerkface/crate,husky-koglhof/crate,adrpar/crate,sunny256/crate,adrpar/crate,crate/crate,puneetjaiswal/crate,husky-koglhof/crate,EvilMcJerkface/crate,gmrodrigues/crate,gmrodrigues/crate,puneetjaiswal/crate,husky-koglhof/crate,EvilMcJerkface/crate,adrpar/crate
/* * Licensed to CRATE Technology GmbH ("Crate") under one or more contributor * license agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. Crate licenses * this file to you under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * However, if you have executed another commercial license agreement * with Crate these terms will supersede the license and you may use the * software solely pursuant to the terms of the relevant commercial agreement. */ package io.crate.types; import org.apache.lucene.util.BytesRef; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import static org.hamcrest.Matchers.is; import static org.junit.Assert.*; public class LongTypeTest { @Rule public ExpectedException expectedException = ExpectedException.none(); @Test public void testBytesRefToLongParsing() throws Exception { assertBytesRefParsing("12839", 12839L); assertBytesRefParsing("-12839", -12839L); assertBytesRefParsing(Long.toString(Long.MAX_VALUE), Long.MAX_VALUE); assertBytesRefParsing(Long.toString(Long.MIN_VALUE), Long.MIN_VALUE); assertBytesRefParsing("+2147483647111", 2147483647111L); } @Test public void testConversionWithNonAsciiCharacter() throws Exception { expectedException.expect(NumberFormatException.class); expectedException.expectMessage("\u03C0"); // "π" GREEK SMALL LETTER PI assertBytesRefParsing("\u03C0", 0L); } @Test public void testInvalidFirstChar() throws Exception { 
expectedException.expect(NumberFormatException.class); assertBytesRefParsing(" 1", 1L); } @Test public void testOnlyMinusSign() throws Exception { expectedException.expect(NumberFormatException.class); assertBytesRefParsing("-", 1L); } @Test public void testOnlyPlusSign() throws Exception { expectedException.expect(NumberFormatException.class); assertBytesRefParsing("+", 1L); } @Test public void testNumberThatIsGreaterThanMaxValue() throws Exception { expectedException.expect(NumberFormatException.class); assertBytesRefParsing(Long.toString(Long.MAX_VALUE) + "111", Long.MIN_VALUE); } private void assertBytesRefParsing(String s, long l) { assertThat(LongType.INSTANCE.value(new BytesRef(s)), is(l)); } }
core/src/test/java/io/crate/types/LongTypeTest.java
/* * Licensed to CRATE Technology GmbH ("Crate") under one or more contributor * license agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. Crate licenses * this file to you under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * However, if you have executed another commercial license agreement * with Crate these terms will supersede the license and you may use the * software solely pursuant to the terms of the relevant commercial agreement. */ package io.crate.types; import org.apache.lucene.util.BytesRef; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import static org.hamcrest.Matchers.is; import static org.junit.Assert.*; public class LongTypeTest { @Rule public ExpectedException expectedException = ExpectedException.none(); @Test public void testBytesRefToLongParsing() throws Exception { assertBytesRefParsing("12839", 12839L); assertBytesRefParsing("-12839", -12839L); assertBytesRefParsing(Long.toString(Long.MAX_VALUE), Long.MAX_VALUE); assertBytesRefParsing(Long.toString(Long.MIN_VALUE), Long.MIN_VALUE); assertBytesRefParsing("+2147483647111", 2147483647111L); } @Test public void testConversionWithNonAsciiCharacter() throws Exception { expectedException.expect(NumberFormatException.class); expectedException.expectMessage("π"); assertBytesRefParsing("\u03C0", 0L); } @Test public void testInvalidFirstChar() throws Exception { expectedException.expect(NumberFormatException.class); 
assertBytesRefParsing(" 1", 1L); } @Test public void testOnlyMinusSign() throws Exception { expectedException.expect(NumberFormatException.class); assertBytesRefParsing("-", 1L); } @Test public void testOnlyPlusSign() throws Exception { expectedException.expect(NumberFormatException.class); assertBytesRefParsing("+", 1L); } @Test public void testNumberThatIsGreaterThanMaxValue() throws Exception { expectedException.expect(NumberFormatException.class); assertBytesRefParsing(Long.toString(Long.MAX_VALUE) + "111", Long.MIN_VALUE); } private void assertBytesRefParsing(String s, long l) { assertThat(LongType.INSTANCE.value(new BytesRef(s)), is(l)); } }
- testConversionWithNonAsciiCharacter unicode fix
core/src/test/java/io/crate/types/LongTypeTest.java
- testConversionWithNonAsciiCharacter unicode fix
Java
apache-2.0
55d6620cfdaf8280440029d4f0a719d64097faa6
0
qqming113/saiku,bft-cheb/saiku,wtstengshen/saiku,bisone/saiku,github-iis-soft-ru/saiku,wwf830527/saiku,zegang/saiku,pstoellberger/saiku,bisone/saiku,wtstengshen/saiku,wtstengshen/saiku,witcxc/saiku,pstoellberger/saiku,standino/saiku,newenter/saiku,zegang/saiku,dasbh/saiku,newenter/saiku,qixiaobo/saiku-self,OSBI/saiku,devgateway/ccrs-saiku,zegang/saiku,OSBI/saiku,wtstengshen/saiku,NAUMEN-GP/saiku,NAUMEN-GP/saiku,GermainSIGETY/sauceDallas-saiku3,qqming113/saiku,qqming113/saiku,hengyuan/saiku,standino/saiku,NAUMEN-GP/saiku,qixiaobo/saiku-self,wtstengshen/saiku,hengyuan/saiku,GermainSIGETY/sauceDallas-saiku3,wwf830527/saiku,bisone/saiku,qixiaobo/saiku-self,devgateway/ccrs-saiku,bft-cheb/saiku,dasbh/saiku,OSBI/saiku,dasbh/saiku,witcxc/saiku,standino/saiku,NAUMEN-GP/saiku,bisone/saiku,qqming113/saiku,wtstengshen/saiku,qqming113/saiku,GermainSIGETY/sauceDallas-saiku3,pstoellberger/saiku,github-iis-soft-ru/saiku,github-iis-soft-ru/saiku,dasbh/saiku,hengyuan/saiku,devgateway/ccrs-saiku,customme/saiku,standino/saiku,bft-cheb/saiku,dasbh/saiku,zegang/saiku,wwf830527/saiku,customme/saiku,witcxc/saiku,newenter/saiku,wwf830527/saiku,bft-cheb/saiku,customme/saiku,NAUMEN-GP/saiku,zegang/saiku,zegang/saiku,bisone/saiku,wwf830527/saiku,OSBI/saiku,OSBI/saiku,newenter/saiku,devgateway/ccrs-saiku,devgateway/ccrs-saiku,pstoellberger/saiku,qqming113/saiku,dasbh/saiku,qixiaobo/saiku-self,hengyuan/saiku,devgateway/ccrs-saiku,bisone/saiku,customme/saiku,standino/saiku,standino/saiku
/* * Copyright (C) 2011 Paul Stoellberger * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * */ package org.saiku.service.olap; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import org.olap4j.Axis; import org.olap4j.CellSet; import org.olap4j.OlapConnection; import org.olap4j.OlapException; import org.olap4j.OlapStatement; import org.olap4j.mdx.IdentifierNode; import org.olap4j.mdx.IdentifierSegment; import org.olap4j.metadata.Cube; import org.olap4j.metadata.Hierarchy; import org.olap4j.metadata.Level; import org.olap4j.query.Query; import org.olap4j.query.QueryAxis; import org.olap4j.query.QueryDimension; import org.olap4j.query.Selection; import org.saiku.olap.dto.SaikuCube; import org.saiku.olap.dto.SaikuDimensionSelection; import org.saiku.olap.dto.SaikuQuery; import org.saiku.olap.dto.resultset.CellDataSet; import org.saiku.olap.query.OlapQuery; import org.saiku.olap.query.QueryDeserializer; import org.saiku.olap.util.ObjectUtil; import org.saiku.olap.util.OlapResultSetUtil; import org.saiku.olap.util.exception.SaikuOlapException; import org.saiku.olap.util.formatter.CellSetFormatter; import 
org.saiku.olap.util.formatter.HierarchicalCellSetFormatter; import org.saiku.olap.util.formatter.ICellSetFormatter; import org.saiku.service.util.OlapUtil; import org.saiku.service.util.exception.SaikuServiceException; import org.saiku.service.util.export.CsvExporter; import org.saiku.service.util.export.ExcelExporter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class OlapQueryService { private static final Logger log = LoggerFactory.getLogger(OlapQueryService.class); private OlapDiscoverService olapDiscoverService; private Map<String,OlapQuery> queries = new HashMap<String,OlapQuery>(); public void setOlapDiscoverService(OlapDiscoverService os) { olapDiscoverService = os; } public SaikuQuery createNewOlapQuery(String queryName, SaikuCube cube) { try { Cube cub = olapDiscoverService.getNativeCube(cube); if (cub != null) { OlapQuery query = new OlapQuery(new Query(queryName, cub),cube); queries.put(queryName, query); return ObjectUtil.convert(query); } } catch (Exception e) { log.error("Cannot create new query for cube :" + cube,e); } return null; } public SaikuQuery createNewOlapQuery(String name, String xml) { try { SaikuCube scube = QueryDeserializer.getFakeCube(xml); OlapConnection con = olapDiscoverService.getNativeConnection(scube.getConnectionName()); OlapQuery query = QueryDeserializer.unparse(xml, con); if (name == null) { queries.put(query.getName(), query); } else { queries.put(name, query); } return ObjectUtil.convert(query); } catch (Exception e) { throw new SaikuServiceException("Error creating query from xml",e); } } public void closeQuery(String queryName) { queries.remove(queryName); OlapUtil.deleteCellSet(queryName); } public List<String> getQueries() { List<String> queryList = new ArrayList<String>(); queryList.addAll(queries.keySet()); return queryList; } public SaikuQuery getQuery(String queryName) { OlapQuery q = getOlapQuery(queryName); return ObjectUtil.convert(q); } public void deleteQuery(String queryName) { 
queries.remove(queryName); } public CellDataSet execute(String queryName) { return execute(queryName,new HierarchicalCellSetFormatter()); } public CellDataSet execute(String queryName, String formatter) { formatter = formatter == null ? "" : formatter.toLowerCase(); if(formatter.equals("flat")) { return execute(queryName, new CellSetFormatter()); } else if (formatter.equals("hierarchical")) { return execute(queryName, new HierarchicalCellSetFormatter()); } return execute(queryName, new HierarchicalCellSetFormatter()); } public CellDataSet execute(String queryName, ICellSetFormatter formatter) { OlapQuery query = getOlapQuery(queryName); try { Long start = (new Date()).getTime(); CellSet cellSet = query.execute(); Long exec = (new Date()).getTime(); CellDataSet result = OlapResultSetUtil.cellSet2Matrix(cellSet,formatter); Long format = (new Date()).getTime(); log.info("Size: " + result.getWidth() + "/" + result.getHeight() + "\tExecute:\t" + (exec - start) + "ms\tFormat:\t" + (format - exec) + "ms\t Total: " + (format - start) + "ms"); OlapUtil.storeCellSet(queryName, cellSet); return result; } catch (Exception e) { throw new SaikuServiceException("Can't execute query: " + queryName,e); } } public ResultSet drilldown(String queryName, int maxrows) { try { final OlapConnection con = olapDiscoverService.getNativeConnection(getQuery(queryName).getCube().getConnectionName()); final OlapStatement stmt = con.createStatement(); String mdx = getMDXQuery(queryName); if (maxrows > 0) { mdx = "DRILLTHROUGH MAXROWS " + maxrows + " " + mdx; } else { mdx = "DRILLTHROUGH " + mdx; } return stmt.executeQuery(mdx); } catch (SQLException e) { throw new SaikuServiceException("Error DRILLTHROUGH: " + queryName,e); } } public void swapAxes(String queryName) { getOlapQuery(queryName).swapAxes(); } public boolean includeMember(String queryName, String dimensionName, String uniqueMemberName, String selectionType, int memberposition){ OlapQuery query = getOlapQuery(queryName); 
List<IdentifierSegment> memberList = IdentifierNode.parseIdentifier(uniqueMemberName).getSegmentList(); QueryDimension dimension = query.getDimension(dimensionName); final Selection.Operator selectionMode = Selection.Operator.valueOf(selectionType); try { Selection sel = dimension.createSelection(selectionMode, memberList); if (dimension.getInclusions().contains(sel)) { dimension.getInclusions().remove(sel); } if (memberposition < 0) { memberposition = dimension.getInclusions().size(); } dimension.getInclusions().add(memberposition, sel); return true; } catch (OlapException e) { throw new SaikuServiceException("Cannot include member query ("+queryName+") dimension (" + dimensionName + ") member ("+ uniqueMemberName+") operator (" + selectionType + ") position " + memberposition,e); } } public boolean removeMember(String queryName, String dimensionName, String uniqueMemberName, String selectionType) throws SaikuServiceException{ OlapQuery query = getOlapQuery(queryName); List<IdentifierSegment> memberList = IdentifierNode.parseIdentifier(uniqueMemberName).getSegmentList(); QueryDimension dimension = query.getDimension(dimensionName); final Selection.Operator selectionMode = Selection.Operator.valueOf(selectionType); try { if (log.isDebugEnabled()) { log.debug("query: "+queryName+" remove:" + selectionMode.toString() + " " + memberList.size()); } Selection selection = dimension.createSelection(selectionMode, memberList); dimension.getInclusions().remove(selection); if (dimension.getInclusions().size() == 0) { moveDimension(queryName, null, dimensionName, -1); } return true; } catch (OlapException e) { throw new SaikuServiceException("Error removing member (" + uniqueMemberName + ") of dimension (" +dimensionName+")",e); } } public boolean includeLevel(String queryName, String dimensionName, String uniqueHierarchyName, String uniqueLevelName) { OlapQuery query = getOlapQuery(queryName); QueryDimension dimension = query.getDimension(dimensionName); for (Hierarchy 
hierarchy : dimension.getDimension().getHierarchies()) { if (hierarchy.getUniqueName().equals(uniqueHierarchyName)) { for (Level level : hierarchy.getLevels()) { if (level.getUniqueName().equals(uniqueLevelName)) { Selection sel = dimension.createSelection(level); if (!dimension.getInclusions().contains(sel)) { dimension.include(level); } return true; } } } } return false; } public boolean removeLevel(String queryName, String dimensionName, String uniqueHierarchyName, String uniqueLevelName) { OlapQuery query = getOlapQuery(queryName); QueryDimension dimension = query.getDimension(dimensionName); try { for (Hierarchy hierarchy : dimension.getDimension().getHierarchies()) { if (hierarchy.getUniqueName().equals(uniqueHierarchyName)) { for (Level level : hierarchy.getLevels()) { if (level.getUniqueName().equals(uniqueLevelName)) { Selection inclusion = dimension.createSelection(level); dimension.getInclusions().remove(inclusion); if (dimension.getInclusions().size() == 0) { moveDimension(queryName, null , dimensionName, -1); } } } } } } catch (Exception e) { throw new SaikuServiceException("Cannot remove level" + uniqueLevelName + "from dimension " + dimensionName,e); } return true; } public void moveDimension(String queryName, String axisName, String dimensionName, int position) { try { if (log.isDebugEnabled()) { log.debug("move query: " + queryName + " dimension " + dimensionName + " to axis " + axisName + " position" + position); } OlapQuery query = getOlapQuery(queryName); QueryDimension dimension = query.getDimension(dimensionName); Axis newAxis = axisName != null ? ( "UNUSED".equals(axisName) ? 
null : Axis.Standard.valueOf(axisName)) : null; if(position==-1){ query.moveDimension(dimension, newAxis); } else{ query.moveDimension(dimension, newAxis, position); } } catch (Exception e) { throw new SaikuServiceException("Cannot move dimension:" + dimensionName + " to axis: "+axisName,e); } } public void removeDimension(String queryName, String axisName, String dimensionName) { OlapQuery query = getOlapQuery(queryName); moveDimension(queryName, "UNUSED" , dimensionName, -1); query.getDimension(dimensionName).getExclusions().clear(); query.getDimension(dimensionName).getInclusions().clear(); } public List<SaikuDimensionSelection> getAxisSelection(String queryName, String axis) { OlapQuery query = getOlapQuery(queryName); List<SaikuDimensionSelection> dimsel = new ArrayList<SaikuDimensionSelection>(); try { QueryAxis qaxis = query.getAxis(axis); if (qaxis != null) { for (QueryDimension dim : qaxis.getDimensions()) { dimsel.add(ObjectUtil.converDimensionSelection(dim)); } } } catch (SaikuOlapException e) { throw new SaikuServiceException("Cannot get dimension selections",e); } return dimsel; } public SaikuDimensionSelection getAxisDimensionSelections(String queryName, String axis, String dimension) { OlapQuery query = getOlapQuery(queryName); try { QueryAxis qaxis = query.getAxis(axis); if (qaxis != null) { QueryDimension dim = query.getDimension(dimension); if (dim != null) { return ObjectUtil.converDimensionSelection(dim); } else { throw new SaikuOlapException("Cannot find dimension with name:" + dimension); } } else { throw new SaikuOlapException("Cannot find axis with name:" + axis); } } catch (SaikuOlapException e) { throw new SaikuServiceException("Cannot get dimension selections",e); } } public void clearQuery(String queryName) { OlapQuery query = getOlapQuery(queryName); query.clearAllQuerySelections(); } public void clearAxis(String queryName, String axisName) { OlapQuery query = getOlapQuery(queryName); if (Axis.Standard.valueOf(axisName) != null) { 
QueryAxis qAxis = query.getAxis(Axis.Standard.valueOf(axisName)); query.resetAxisSelections(qAxis); for (QueryDimension dim : qAxis.getDimensions()) { qAxis.removeDimension(dim); } } } public void clearAxisSelections(String queryName, String axisName) { OlapQuery query = getOlapQuery(queryName); if (Axis.Standard.valueOf(axisName) != null) { QueryAxis qAxis = query.getAxis(Axis.Standard.valueOf(axisName)); query.resetAxisSelections(qAxis); } } public void resetQuery(String queryName) { OlapQuery query = getOlapQuery(queryName); query.resetQuery(); } public void setNonEmpty(String queryName, String axisName, boolean bool) { OlapQuery query = getOlapQuery(queryName); QueryAxis newAxis = query.getAxis(Axis.Standard.valueOf(axisName)); newAxis.setNonEmpty(bool); } public void setProperties(String queryName, Properties props) { OlapQuery query = getOlapQuery(queryName); query.setProperties(props); } public Properties getProperties(String queryName) { OlapQuery query = getOlapQuery(queryName); return query.getProperties(); } public String getMDXQuery(String queryName) { return getOlapQuery(queryName).getMdx(); } public String getQueryXml(String queryName) { OlapQuery query = getOlapQuery(queryName); return query.toXml(); } public byte[] getExport(String queryName, String type) { return getExport(queryName,type,new HierarchicalCellSetFormatter()); } public byte[] getExport(String queryName, String type, String formatter) { formatter = formatter == null ? 
"" : formatter.toLowerCase(); if (formatter.equals("flat")) { return getExport(queryName, type, new CellSetFormatter()); } else if (formatter.equals("hierarchical")) { return getExport(queryName, type, new HierarchicalCellSetFormatter()); } return getExport(queryName, type, new HierarchicalCellSetFormatter()); } public byte[] getExport(String queryName, String type, ICellSetFormatter formatter) { if (type != null) { CellSet rs = OlapUtil.getCellSet(queryName); if (type.toLowerCase().equals("xls")) { return ExcelExporter.exportExcel(rs,formatter); } if (type.toLowerCase().equals("csv")) { return CsvExporter.exportCsv(rs,",","\"", formatter); } } return new byte[0]; } private OlapQuery getOlapQuery(String queryName) { OlapQuery query = queries.get(queryName); if (query == null) { throw new SaikuServiceException("No query with name ("+queryName+") found"); } return query; } }
saiku-core/saiku-service/src/main/java/org/saiku/service/olap/OlapQueryService.java
/* * Copyright (C) 2011 Paul Stoellberger * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * */ package org.saiku.service.olap; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import org.olap4j.Axis; import org.olap4j.CellSet; import org.olap4j.OlapConnection; import org.olap4j.OlapException; import org.olap4j.OlapStatement; import org.olap4j.mdx.IdentifierNode; import org.olap4j.mdx.IdentifierSegment; import org.olap4j.metadata.Cube; import org.olap4j.metadata.Hierarchy; import org.olap4j.metadata.Level; import org.olap4j.query.Query; import org.olap4j.query.QueryAxis; import org.olap4j.query.QueryDimension; import org.olap4j.query.Selection; import org.saiku.olap.dto.SaikuCube; import org.saiku.olap.dto.SaikuDimensionSelection; import org.saiku.olap.dto.SaikuQuery; import org.saiku.olap.dto.resultset.CellDataSet; import org.saiku.olap.query.OlapQuery; import org.saiku.olap.query.QueryDeserializer; import org.saiku.olap.util.ObjectUtil; import org.saiku.olap.util.OlapResultSetUtil; import org.saiku.olap.util.exception.SaikuOlapException; import org.saiku.olap.util.formatter.CellSetFormatter; import 
org.saiku.olap.util.formatter.HierarchicalCellSetFormatter; import org.saiku.olap.util.formatter.ICellSetFormatter; import org.saiku.service.util.OlapUtil; import org.saiku.service.util.exception.SaikuServiceException; import org.saiku.service.util.export.CsvExporter; import org.saiku.service.util.export.ExcelExporter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class OlapQueryService { private static final Logger log = LoggerFactory.getLogger(OlapQueryService.class); private OlapDiscoverService olapDiscoverService; private Map<String,OlapQuery> queries = new HashMap<String,OlapQuery>(); public void setOlapDiscoverService(OlapDiscoverService os) { olapDiscoverService = os; } public SaikuQuery createNewOlapQuery(String queryName, SaikuCube cube) { try { Cube cub = olapDiscoverService.getNativeCube(cube); if (cub != null) { OlapQuery query = new OlapQuery(new Query(queryName, cub),cube); queries.put(queryName, query); return ObjectUtil.convert(query); } } catch (Exception e) { log.error("Cannot create new query for cube :" + cube,e); } return null; } public SaikuQuery createNewOlapQuery(String name, String xml) { try { SaikuCube scube = QueryDeserializer.getFakeCube(xml); OlapConnection con = olapDiscoverService.getNativeConnection(scube.getConnectionName()); OlapQuery query = QueryDeserializer.unparse(xml, con); if (name == null) { queries.put(query.getName(), query); } else { queries.put(name, query); } return ObjectUtil.convert(query); } catch (Exception e) { throw new SaikuServiceException("Error creating query from xml",e); } } public void closeQuery(String queryName) { queries.remove(queryName); OlapUtil.deleteCellSet(queryName); } public List<String> getQueries() { List<String> queryList = new ArrayList<String>(); queryList.addAll(queries.keySet()); return queryList; } public SaikuQuery getQuery(String queryName) { OlapQuery q = getOlapQuery(queryName); return ObjectUtil.convert(q); } public void deleteQuery(String queryName) { 
queries.remove(queryName); } public CellDataSet execute(String queryName) { return execute(queryName,new HierarchicalCellSetFormatter()); } public CellDataSet execute(String queryName, String formatter) { formatter = formatter == null ? "" : formatter.toLowerCase(); if(formatter.equals("flat")) { return execute(queryName, new CellSetFormatter()); } else if (formatter.equals("hierarchical")) { return execute(queryName, new HierarchicalCellSetFormatter()); } return execute(queryName, new HierarchicalCellSetFormatter()); } public CellDataSet execute(String queryName, ICellSetFormatter formatter) { OlapQuery query = getOlapQuery(queryName); try { Long start = (new Date()).getTime(); CellSet cellSet = query.execute(); Long exec = (new Date()).getTime(); CellDataSet result = OlapResultSetUtil.cellSet2Matrix(cellSet,formatter); Long format = (new Date()).getTime(); log.info("Size: " + result.getWidth() + "/" + result.getHeight() + "\tExecute:\t" + (exec - start) + "ms\tFormat:\t" + (format - exec) + "ms\t Total: " + (format - start) + "ms"); OlapUtil.storeCellSet(queryName, cellSet); return result; } catch (Exception e) { throw new SaikuServiceException("Can't execute query: " + queryName,e); } } public ResultSet drilldown(String queryName, int maxrows) { try { final OlapConnection con = olapDiscoverService.getNativeConnection(getQuery(queryName).getCube().getConnectionName()); final OlapStatement stmt = con.createStatement(); String mdx = getMDXQuery(queryName); if (maxrows > 0) { mdx = "DRILLTHROUGH MAXROWS " + maxrows + " " + mdx; } else { mdx = "DRILLTHROUGH " + mdx; } return stmt.executeQuery(mdx); } catch (SQLException e) { throw new SaikuServiceException("Error DRILLTHROUGH: " + queryName,e); } } public void swapAxes(String queryName) { getOlapQuery(queryName).swapAxes(); } public boolean includeMember(String queryName, String dimensionName, String uniqueMemberName, String selectionType, int memberposition){ OlapQuery query = getOlapQuery(queryName); 
List<IdentifierSegment> memberList = IdentifierNode.parseIdentifier(uniqueMemberName).getSegmentList(); QueryDimension dimension = query.getDimension(dimensionName); final Selection.Operator selectionMode = Selection.Operator.valueOf(selectionType); try { Selection sel = dimension.createSelection(selectionMode, memberList); if (dimension.getInclusions().contains(sel)) { dimension.getInclusions().remove(sel); } if (memberposition < 0) { memberposition = dimension.getInclusions().size(); } dimension.getInclusions().add(memberposition, sel); return true; } catch (OlapException e) { throw new SaikuServiceException("Cannot include member query ("+queryName+") dimension (" + dimensionName + ") member ("+ uniqueMemberName+") operator (" + selectionType + ") position " + memberposition,e); } } public boolean removeMember(String queryName, String dimensionName, String uniqueMemberName, String selectionType) throws SaikuServiceException{ OlapQuery query = getOlapQuery(queryName); List<IdentifierSegment> memberList = IdentifierNode.parseIdentifier(uniqueMemberName).getSegmentList(); QueryDimension dimension = query.getDimension(dimensionName); final Selection.Operator selectionMode = Selection.Operator.valueOf(selectionType); try { if (log.isDebugEnabled()) { log.debug("query: "+queryName+" remove:" + selectionMode.toString() + " " + memberList.size()); } Selection selection = dimension.createSelection(selectionMode, memberList); dimension.getInclusions().remove(selection); if (dimension.getInclusions().size() == 0) { moveDimension(queryName, null, dimensionName, -1); } return true; } catch (OlapException e) { throw new SaikuServiceException("Error removing member (" + uniqueMemberName + ") of dimension (" +dimensionName+")",e); } } public boolean includeLevel(String queryName, String dimensionName, String uniqueHierarchyName, String uniqueLevelName) { OlapQuery query = getOlapQuery(queryName); QueryDimension dimension = query.getDimension(dimensionName); for (Hierarchy 
hierarchy : dimension.getDimension().getHierarchies()) { if (hierarchy.getUniqueName().equals(uniqueHierarchyName)) { for (Level level : hierarchy.getLevels()) { if (level.getUniqueName().equals(uniqueLevelName)) { Selection sel = dimension.createSelection(level); if (!dimension.getInclusions().contains(sel)) { dimension.include(level); } return true; } } } } return false; } public boolean removeLevel(String queryName, String dimensionName, String uniqueHierarchyName, String uniqueLevelName) { OlapQuery query = getOlapQuery(queryName); QueryDimension dimension = query.getDimension(dimensionName); try { for (Hierarchy hierarchy : dimension.getDimension().getHierarchies()) { if (hierarchy.getUniqueName().equals(uniqueHierarchyName)) { for (Level level : hierarchy.getLevels()) { if (level.getUniqueName().equals(uniqueLevelName)) { Selection inclusion = dimension.createSelection(level); dimension.getInclusions().remove(inclusion); if (dimension.getInclusions().size() == 0) { moveDimension(queryName, null , dimensionName, -1); } } } } } } catch (Exception e) { throw new SaikuServiceException("Cannot remove level" + uniqueLevelName + "from dimension " + dimensionName,e); } return true; } public void moveDimension(String queryName, String axisName, String dimensionName, int position) { try { if (log.isDebugEnabled()) { log.debug("move query: " + queryName + " dimension " + dimensionName + " to axis " + axisName + " position" + position); } OlapQuery query = getOlapQuery(queryName); QueryDimension dimension = query.getDimension(dimensionName); Axis newAxis = axisName != null ? ( "UNUSED".equals(axisName) ? 
null : Axis.Standard.valueOf(axisName)) : null; if(position==-1){ query.moveDimension(dimension, newAxis); } else{ query.moveDimension(dimension, newAxis, position); } } catch (Exception e) { throw new SaikuServiceException("Cannot move dimension:" + dimensionName + " to axis: "+axisName,e); } } public void removeDimension(String queryName, String axisName, String dimensionName) { OlapQuery query = getOlapQuery(queryName); moveDimension(queryName, "UNUSED" , dimensionName, -1); } public List<SaikuDimensionSelection> getAxisSelection(String queryName, String axis) { OlapQuery query = getOlapQuery(queryName); List<SaikuDimensionSelection> dimsel = new ArrayList<SaikuDimensionSelection>(); try { QueryAxis qaxis = query.getAxis(axis); if (qaxis != null) { for (QueryDimension dim : qaxis.getDimensions()) { dimsel.add(ObjectUtil.converDimensionSelection(dim)); } } } catch (SaikuOlapException e) { throw new SaikuServiceException("Cannot get dimension selections",e); } return dimsel; } public SaikuDimensionSelection getAxisDimensionSelections(String queryName, String axis, String dimension) { OlapQuery query = getOlapQuery(queryName); try { QueryAxis qaxis = query.getAxis(axis); if (qaxis != null) { QueryDimension dim = query.getDimension(dimension); if (dim != null) { return ObjectUtil.converDimensionSelection(dim); } else { throw new SaikuOlapException("Cannot find dimension with name:" + dimension); } } else { throw new SaikuOlapException("Cannot find axis with name:" + axis); } } catch (SaikuOlapException e) { throw new SaikuServiceException("Cannot get dimension selections",e); } } public void clearQuery(String queryName) { OlapQuery query = getOlapQuery(queryName); query.clearAllQuerySelections(); } public void clearAxis(String queryName, String axisName) { OlapQuery query = getOlapQuery(queryName); if (Axis.Standard.valueOf(axisName) != null) { QueryAxis qAxis = query.getAxis(Axis.Standard.valueOf(axisName)); query.resetAxisSelections(qAxis); for (QueryDimension dim 
: qAxis.getDimensions()) { qAxis.removeDimension(dim); } } } public void clearAxisSelections(String queryName, String axisName) { OlapQuery query = getOlapQuery(queryName); if (Axis.Standard.valueOf(axisName) != null) { QueryAxis qAxis = query.getAxis(Axis.Standard.valueOf(axisName)); query.resetAxisSelections(qAxis); } } public void resetQuery(String queryName) { OlapQuery query = getOlapQuery(queryName); query.resetQuery(); } public void setNonEmpty(String queryName, String axisName, boolean bool) { OlapQuery query = getOlapQuery(queryName); QueryAxis newAxis = query.getAxis(Axis.Standard.valueOf(axisName)); newAxis.setNonEmpty(bool); } public void setProperties(String queryName, Properties props) { OlapQuery query = getOlapQuery(queryName); query.setProperties(props); } public Properties getProperties(String queryName) { OlapQuery query = getOlapQuery(queryName); return query.getProperties(); } public String getMDXQuery(String queryName) { return getOlapQuery(queryName).getMdx(); } public String getQueryXml(String queryName) { OlapQuery query = getOlapQuery(queryName); return query.toXml(); } public byte[] getExport(String queryName, String type) { return getExport(queryName,type,new HierarchicalCellSetFormatter()); } public byte[] getExport(String queryName, String type, String formatter) { formatter = formatter == null ? 
"" : formatter.toLowerCase(); if (formatter.equals("flat")) { return getExport(queryName, type, new CellSetFormatter()); } else if (formatter.equals("hierarchical")) { return getExport(queryName, type, new HierarchicalCellSetFormatter()); } return getExport(queryName, type, new HierarchicalCellSetFormatter()); } public byte[] getExport(String queryName, String type, ICellSetFormatter formatter) { if (type != null) { CellSet rs = OlapUtil.getCellSet(queryName); if (type.toLowerCase().equals("xls")) { return ExcelExporter.exportExcel(rs,formatter); } if (type.toLowerCase().equals("csv")) { return CsvExporter.exportCsv(rs,",","\"", formatter); } } return new byte[0]; } private OlapQuery getOlapQuery(String queryName) { OlapQuery query = queries.get(queryName); if (query == null) { throw new SaikuServiceException("No query with name ("+queryName+") found"); } return query; } }
clear inclusions etc. on removedimension
saiku-core/saiku-service/src/main/java/org/saiku/service/olap/OlapQueryService.java
clear inclusions etc. on removedimension
Java
apache-2.0
ef77d90f1b53ac2b0720ae255487f8046fbb9d3a
0
jangalinski/camunda-bpm-platform,plexiti/camunda-bpm-platform,clintmanning/new-empty,clintmanning/new-empty,fouasnon/camunda-bpm-platform,tcrossland/camunda-bpm-platform,camunda/camunda-bpm-platform,menski/camunda-bpm-platform,menski/camunda-bpm-platform,plexiti/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,plexiti/camunda-bpm-platform,holisticon/camunda-bpm-platform,Sumitdahiya/camunda,holisticon/camunda-bpm-platform,falko/camunda-bpm-platform,nibin/camunda-bpm-platform,joansmith/camunda-bpm-platform,filiphr/camunda-bpm-platform,holisticon/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,filiphr/camunda-bpm-platform,plexiti/camunda-bpm-platform,camunda/camunda-bpm-platform,fouasnon/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,camunda/camunda-bpm-platform,xasx/camunda-bpm-platform,falko/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,LuisePufahl/camunda-bpm-platform_batchProcessing,langfr/camunda-bpm-platform,rainerh/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,xasx/camunda-bpm-platform,tcrossland/camunda-bpm-platform,tcrossland/camunda-bpm-platform,Sumitdahiya/camunda,hupda-edpe/c,fouasnon/camunda-bpm-platform,langfr/camunda-bpm-platform,jangalinski/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,holisticon/camunda-bpm-platform,joansmith/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,langfr/camunda-bpm-platform,rainerh/camunda-bpm-platform,menski/camunda-bpm-platform,filiphr/camunda-bpm-platform,rainerh/camunda-bpm-platform,bentrm/camunda-bpm-platform,1and1/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,skjolber/camunda-bpm-platform,filiphr/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,joansmith/camunda-bpm-platform,bentrm/camunda-bpm-platform,jangalinski/camunda-bpm-platform,joansmith/camunda-bpm-platform,ingo
richtsmeier/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,nibin/camunda-bpm-platform,skjolber/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,ingorichtsmeier/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,xasx/camunda-bpm-platform,camunda/camunda-bpm-platform,xasx/camunda-bpm-platform,langfr/camunda-bpm-platform,nibin/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,hupda-edpe/c,bentrm/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,xasx/camunda-bpm-platform,nibin/camunda-bpm-platform,menski/camunda-bpm-platform,skjolber/camunda-bpm-platform,skjolber/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,hupda-edpe/c,subhrajyotim/camunda-bpm-platform,plexiti/camunda-bpm-platform,langfr/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,nagyistoce/camunda-bpm-platform,nibin/camunda-bpm-platform,AlexMinsk/camunda-bpm-platform,jangalinski/camunda-bpm-platform,camunda/camunda-bpm-platform,Sumitdahiya/camunda,jangalinski/camunda-bpm-platform,nibin/camunda-bpm-platform,rainerh/camunda-bpm-platform,skjolber/camunda-bpm-platform,1and1/camunda-bpm-platform,hupda-edpe/c,joansmith/camunda-bpm-platform,filiphr/camunda-bpm-platform,falko/camunda-bpm-platform,rainerh/camunda-bpm-platform,bentrm/camunda-bpm-platform,hupda-edpe/c,subhrajyotim/camunda-bpm-platform,falko/camunda-bpm-platform,langfr/camunda-bpm-platform,rainerh/camunda-bpm-platform,camunda/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,filiphr/camunda-bpm-platform,fouasnon/camunda-bpm-platform,hawky-4s-/camunda-bpm-platform,holisticon/camunda-bpm-platform,Sumitdahiya/camunda,tcrossland/camunda-bpm-platform,fouasnon/camunda-bpm-platform,holisticon/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,xasx/camunda-bpm-platform,menski/camunda-bpm-platform,Sumitdahiya/camunda,hawky-4s-/camunda-bpm-platform,falko/camunda-bpm-platform,subhrajyotim/camunda-bpm-platform,LuisePufahl/camunda-bpm-platform_batchProcessing,tc
rossland/camunda-bpm-platform,hupda-edpe/c,hawky-4s-/camunda-bpm-platform,jangalinski/camunda-bpm-platform,Sumitdahiya/camunda,falko/camunda-bpm-platform,tcrossland/camunda-bpm-platform,bentrm/camunda-bpm-platform,skjolber/camunda-bpm-platform,1and1/camunda-bpm-platform,bentrm/camunda-bpm-platform,fouasnon/camunda-bpm-platform,1and1/camunda-bpm-platform,clintmanning/new-empty,plexiti/camunda-bpm-platform,joansmith/camunda-bpm-platform
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.activiti.engine.impl.webservice; import java.util.logging.Level; import java.util.logging.Logger; import org.activiti.engine.impl.bpmn.MessageDefinition; import org.activiti.engine.impl.bpmn.MessageInstance; import org.activiti.engine.impl.bpmn.Operation; import org.activiti.engine.impl.bpmn.OperationImplementation; /** * Represents a WS implementation of a {@link Operation} * * @author Esteban Robles Luna */ public class WSOperation implements OperationImplementation { private static final Logger LOGGER = Logger.getLogger(WSOperation.class.getName()); protected String id; protected String name; protected WSService service; public WSOperation(String id, String operationName, WSService service) { this.id = id; this.name = operationName; this.service = service; } /** * {@inheritDoc} */ public String getId() { return this.id; } /** * {@inheritDoc} */ public String getName() { return this.name; } /** * {@inheritDoc} */ public MessageInstance sendFor(MessageInstance message, Operation operation) { Object[] arguments = this.getArguments(message); Object[] results = this.safeSend(arguments); return this.createResponseMessage(results, operation); } private Object[] getArguments(MessageInstance message) { return message.getStructureInstance().toArray(); } private Object[] safeSend(Object[] arguments) { Object[] results = null; try { results = this.service.getClient().send(this.name, arguments); } catch (Exception e) { 
LOGGER.log(Level.WARNING, "Error calling WS " + this.service.getName(), e); } if (results == null) { results = new Object[] {}; } return results; } private MessageInstance createResponseMessage(Object[] results, Operation operation) { MessageInstance message = null; MessageDefinition outMessage = operation.getOutMessage(); if (outMessage != null) { message = outMessage.createInstance(); message.getStructureInstance().loadFrom(results); } return message; } public WSService getService() { return this.service; } }
activiti-engine/src/main/java/org/activiti/engine/impl/webservice/WSOperation.java
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.activiti.engine.impl.webservice; import java.util.logging.Level; import java.util.logging.Logger; import org.activiti.engine.impl.bpmn.MessageInstance; import org.activiti.engine.impl.bpmn.Operation; import org.activiti.engine.impl.bpmn.OperationImplementation; /** * Represents a WS implementation of a {@link Operation} * * @author Esteban Robles Luna */ public class WSOperation implements OperationImplementation { private static final Logger LOGGER = Logger.getLogger(WSOperation.class.getName()); protected String id; protected String name; protected WSService service; public WSOperation(String id, String operationName, WSService service) { this.id = id; this.name = operationName; this.service = service; } /** * {@inheritDoc} */ public String getId() { return this.id; } /** * {@inheritDoc} */ public String getName() { return this.name; } /** * {@inheritDoc} */ public MessageInstance sendFor(MessageInstance message, Operation operation) { Object[] arguments = this.getArguments(message); Object[] results = this.safeSend(arguments); return this.createResponseMessage(results, operation); } private Object[] getArguments(MessageInstance message) { return message.getStructureInstance().toArray(); } private Object[] safeSend(Object[] arguments) { Object[] results = null; try { results = this.service.getClient().send(this.name, arguments); } catch (Exception e) { LOGGER.log(Level.WARNING, "Error calling WS " + 
this.service.getName(), e); } if (results == null) { results = new Object[] {}; } return results; } private MessageInstance createResponseMessage(Object[] results, Operation operation) { MessageInstance message = operation.getOutMessage().createInstance(); message.getStructureInstance().loadFrom(results); return message; } public WSService getService() { return this.service; } }
Fixed NPE in WSOperation
activiti-engine/src/main/java/org/activiti/engine/impl/webservice/WSOperation.java
Fixed NPE in WSOperation
Java
apache-2.0
e53b2eb9a9f66832f83516148d52ffe140e44831
0
slisson/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,ernestp/consulo,salguarnieri/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,supersven/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,Lekanich/intellij-community,holmes/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,retomerz/intellij-community,blademainer/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,jagguli/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,caot/intellij-community,adedayo/intellij-community,fnouama/intellij-community,slisson/intellij-community,signed/intellij-community,TangHao1987/intellij-community,nicolargo/intellij-community,caot/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,kdwink/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,da1z/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,TangHao1987/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,kdwink/intellij-community,fitermay/intellij-community,samthor/intellij-community,kool79/intellij-community,SerCeMan/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-c
ommunity,ibinti/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,izonder/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,FHannes/intellij-community,fitermay/intellij-community,ryano144/intellij-community,samthor/intellij-community,hurricup/intellij-community,diorcety/intellij-community,xfournet/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,blademainer/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,semonte/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,adedayo/intellij-community,xfournet/intellij-community,kdwink/intellij-community,caot/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,fitermay/intellij-community,alphafoobar/intellij-community,tmpgit/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-community,petteyg/intellij-community,gnuhub/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,slisson/intellij-community,SerCeMan/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,kool79/intellij-community,alphafoobar/intellij-community,apixandru/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,supersven
/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,ftomassetti/intellij-community,gnuhub/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,consulo/consulo,ibinti/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,fengbaicanhe/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,izonder/intellij-community,gnuhub/intellij-community,orekyuu/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,semonte/intellij-community,ibinti/intellij-community,da1z/intellij-community,gnuhub/intellij-community,amith01994/intellij-community,ernestp/consulo,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,izonder/intellij-community,salguarnieri/intellij-community,ol-loginov/intellij-community,MER-GROUP/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,dslomov/intellij-community,blademainer/intellij-community,adedayo/intellij-community,vladmm/intellij-community,vladmm/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,robovm/robovm-studio,petteyg/intellij-community,ibinti/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,youdonghai/intellij-community,ibint
i/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,signed/intellij-community,wreckJ/intellij-community,consulo/consulo,michaelgallacher/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,da1z/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,ryano144/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,gnuhub/intellij-community,caot/intellij-community,orekyuu/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,tmpgit/intellij-community,ibinti/intellij-community,jagguli/intellij-community,hurricup/intellij-community,retomerz/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,supersven/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,kdwink/intellij-community,asedunov/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,blademainer/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,apixandru/intellij-community,ernestp/consulo,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,FHannes/intellij-community,vladmm/intellij-community,dslomov/intellij-community,allotria/intellij
-community,allotria/intellij-community,apixandru/intellij-community,hurricup/intellij-community,asedunov/intellij-community,clumsy/intellij-community,apixandru/intellij-community,consulo/consulo,TangHao1987/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,samthor/intellij-community,vvv1559/intellij-community,MER-GROUP/intellij-community,SerCeMan/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,signed/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,alphafoobar/intellij-community,ftomassetti/intellij-community,ernestp/consulo,vladmm/intellij-community,samthor/intellij-community,alphafoobar/intellij-community,caot/intellij-community,ernestp/consulo,SerCeMan/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,slisson/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,slisson/intellij-community,allotria/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,slisson/intellij-community,akosyakov/intellij-community,fitermay/intellij-community,holmes/intellij-community,xfournet/intellij-community,semonte/intellij-community,kdwink/intellij-community,izonder/intellij-community,tmpgit/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,hurricup/intellij-community,samthor/intellij-community,retomerz/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,ibinti/intellij-community,ryano144/intellij-community,dslomov/intellij-community,alphafoobar/intellij-community,vla
dmm/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,semonte/intellij-community,caot/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,semonte/intellij-community,supersven/intellij-community,supersven/intellij-community,ahb0327/intellij-community,supersven/intellij-community,clumsy/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,FHannes/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,consulo/consulo,semonte/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,blademainer/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,michaelgallacher/intellij-community,kool79/intellij-community,allotria/intellij-community,amith01994/intellij-community,samthor/intellij-community,xfournet/intellij-community,amith01994/intellij-community,semonte/intellij-community,adedayo/intellij-community,caot/intellij-community,hurricup/intellij-community,diorcety/intellij-community,izonder/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,da1z/intellij-community,jagguli/intellij-community,allotria/intellij-community,samthor/intellij-community,nicolargo/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,hurricup/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,holmes/intellij-community,signed/intellij-community,semonte/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,da1z/intellij-community,adedayo/intellij-communi
ty,FHannes/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,akosyakov/intellij-community,petteyg/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,asedunov/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,kool79/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,consulo/consulo,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,holmes/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,FHannes/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,tmpgit/intellij-community,fitermay/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,ahb0327/intellij-community,kool79/intellij-community,holmes/intellij-community,diorcety/intellij-community,vladmm/intellij-community,wreckJ/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,dslomov/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,retomerz/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,Distrotech/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,robovm/robovm-studio,allotria/intellij-community,diorcety/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,consulo/consulo,ol-loginov/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,amith01994/intellij-community,adedayo/intellij-co
mmunity,supersven/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,signed/intellij-community,allotria/intellij-community,xfournet/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,jagguli/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,da1z/intellij-community,kdwink/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,akosyakov/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,caot/intellij-community,samthor/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,Distrotech/intellij-community,clumsy/intellij-community,Distrotech/intellij-community,nicolargo/intellij-community,clumsy/intellij-community,amith01994/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,mglukhikh/intellij-community,holmes/intellij-community,petteyg/intellij-community,dslomov/intellij-community,retomerz/intellij-community,dslomov/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,jagguli/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,diorcety/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,Distr
otech/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,petteyg/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,gnuhub/intellij-community,izonder/intellij-community,salguarnieri/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,suncycheng/intellij-community,pwoodworth/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,clumsy/intellij-community,amith01994/intellij-community,holmes/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,suncycheng/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,diorcety/intellij-community,ahb0327/intellij-community,signed/intellij-community,SerCeMan/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,nicolargo/intellij-community,fnouama/intellij-community,slisson/intellij-community,signed/intellij-community,dslomov/intellij-community,diorcety/intellij-community,kool79/intellij-community,MER-GROUP/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/i
ntellij-community,clumsy/intellij-community,petteyg/intellij-community,ahb0327/intellij-community,allotria/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,amith01994/intellij-community,MichaelNedzelsky/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,supersven/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,da1z/intellij-community,petteyg/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,FHannes/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,petteyg/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,caot/intellij-community,robovm/robovm-studio,lucafavatella/intellij-community,vvv1559/intellij-community,fnouama/intellij-community,hurricup/intellij-community,slisson/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,Distrotech/intellij-community,akosyakov/intellij-community,pwoodworth/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,fnouama/intellij-community,kdwink/intellij-community,asedunov/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,signed/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,samthor/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,fnouama/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,asedunov/intellij-community,amith01994/intellij-community,izonder/intellij-community,nicolargo/intellij-community,nicolargo/intellij-community,izonder/intellij-community,signed/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,youdonghai/in
tellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,semonte/intellij-community,slisson/intellij-community,samthor/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,SerCeMan/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,clumsy/intellij-community,allotria/intellij-community,vvv1559/intellij-community,ernestp/consulo,FHannes/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,asedunov/intellij-community,robovm/robovm-studio,ftomassetti/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,Distrotech/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,jagguli/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,robovm/robovm-studio,lucafavatella/intellij-community,ryano144/intellij-community,xfournet/intellij-community,fnouama/intellij-community
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.cvsSupport2.actions; import com.intellij.CvsBundle; import com.intellij.cvsSupport2.actions.cvsContext.CvsContext; import com.intellij.cvsSupport2.actions.cvsContext.CvsContextWrapper; import com.intellij.cvsSupport2.config.CvsRootConfiguration; import com.intellij.cvsSupport2.config.ui.SelectCvsConfigurationDialog; import com.intellij.cvsSupport2.connections.CvsEnvironment; import com.intellij.cvsSupport2.cvsBrowser.ui.BrowserPanel; import com.intellij.cvsSupport2.cvsExecution.ModalityContext; import com.intellij.cvsSupport2.cvsExecution.ModalityContextImpl; import com.intellij.cvsSupport2.cvshandlers.AbstractCvsHandler; import com.intellij.cvsSupport2.cvshandlers.CvsHandler; import com.intellij.cvsSupport2.cvshandlers.FileSetToBeUpdated; import com.intellij.cvsSupport2.cvsoperations.common.LoginPerformer; import com.intellij.cvsSupport2.ui.CvsTabbedWindow; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.Presentation; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.Project; import com.intellij.openapi.vcs.VcsException; import com.intellij.openapi.vcs.actions.VcsContext; import com.intellij.util.Consumer; import java.util.Collections; /** * author: lesya */ public class BrowseCvsRepositoryAction extends AbstractAction implements 
DumbAware { private static final String TITLE = CvsBundle.message("operation.name.browse.repository"); private CvsRootConfiguration mySelectedConfiguration; public BrowseCvsRepositoryAction() { super(false); } public void update(AnActionEvent e) { Presentation presentation = e.getPresentation(); VcsContext context = CvsContextWrapper.createInstance(e); boolean projectExists = context.getProject() != null; presentation.setVisible(projectExists); presentation.setEnabled(projectExists); } protected String getTitle(VcsContext context) { return TITLE; } protected CvsHandler getCvsHandler(CvsContext context) { SelectCvsConfigurationDialog selectCvsConfigurationDialog = new SelectCvsConfigurationDialog(context.getProject()); selectCvsConfigurationDialog.show(); if (!selectCvsConfigurationDialog.isOK()) return CvsHandler.NULL; mySelectedConfiguration = selectCvsConfigurationDialog.getSelectedConfiguration(); return new MyCvsHandler(context.getProject()); } protected void onActionPerformed(CvsContext context, CvsTabbedWindow tabbedWindow, boolean successfully, CvsHandler handler) { if (mySelectedConfiguration == null) return; if (! 
loginImpl(context.getProject(), new ModalityContextImpl(ModalityState.NON_MODAL, false), new Consumer<VcsException>() { public void consume(VcsException e) { // } })) return; super.onActionPerformed(context, tabbedWindow, successfully, handler); if (successfully){ Project project = context.getProject(); LOG.assertTrue(project != null); LOG.assertTrue(mySelectedConfiguration != null); final BrowserPanel browserPanel = new BrowserPanel(mySelectedConfiguration, project); tabbedWindow.addTab(TITLE, browserPanel, true, true, true, true, browserPanel.getActionGroup(), "cvs.browse"); tabbedWindow.ensureVisible(project); } } private class MyCvsHandler extends AbstractCvsHandler { private final Project myProject; public MyCvsHandler(Project project) { super(TITLE, FileSetToBeUpdated.EMPTY); myProject = project; } public boolean isCanceled() { return false; } protected int getFilesToProcessCount() { return 0; } public boolean login(ModalityContext executor) throws Exception { return loginImpl(myProject, executor, new Consumer<VcsException>() { public void consume(VcsException e) { myErrors.add(e); } }); /*final LoginPerformer.MyProjectKnown performer = new LoginPerformer.MyProjectKnown(myProject, Collections.<CvsEnvironment>singletonList(mySelectedConfiguration), new Consumer<VcsException>() { public void consume(VcsException e) { myErrors.add(e); } }); return performer.loginAll(executor, false);*/ } } public boolean loginImpl(final Project project, final ModalityContext executor, final Consumer<VcsException> exceptionConsumer) { final LoginPerformer.MyProjectKnown performer = new LoginPerformer.MyProjectKnown(project, Collections.<CvsEnvironment>singletonList(mySelectedConfiguration), exceptionConsumer); return performer.loginAll(executor, false); } }
plugins/cvs/cvs-plugin/src/com/intellij/cvsSupport2/actions/BrowseCvsRepositoryAction.java
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.cvsSupport2.actions; import com.intellij.CvsBundle; import com.intellij.cvsSupport2.actions.cvsContext.CvsContext; import com.intellij.cvsSupport2.actions.cvsContext.CvsContextWrapper; import com.intellij.cvsSupport2.config.CvsRootConfiguration; import com.intellij.cvsSupport2.config.ui.SelectCvsConfigurationDialog; import com.intellij.cvsSupport2.connections.CvsEnvironment; import com.intellij.cvsSupport2.cvsBrowser.ui.BrowserPanel; import com.intellij.cvsSupport2.cvsExecution.ModalityContext; import com.intellij.cvsSupport2.cvsExecution.ModalityContextImpl; import com.intellij.cvsSupport2.cvshandlers.AbstractCvsHandler; import com.intellij.cvsSupport2.cvshandlers.CvsHandler; import com.intellij.cvsSupport2.cvshandlers.FileSetToBeUpdated; import com.intellij.cvsSupport2.cvsoperations.common.LoginPerformer; import com.intellij.cvsSupport2.ui.CvsTabbedWindow; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.Presentation; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.Project; import com.intellij.openapi.vcs.VcsException; import com.intellij.openapi.vcs.actions.VcsContext; import com.intellij.util.Consumer; import java.util.Collections; /** * author: lesya */ public class BrowseCvsRepositoryAction extends AbstractAction implements 
DumbAware { private static final String TITLE = CvsBundle.message("operation.name.browse.repository"); private CvsRootConfiguration mySelectedConfiguration; public BrowseCvsRepositoryAction() { super(false); } public void update(AnActionEvent e) { Presentation presentation = e.getPresentation(); VcsContext context = CvsContextWrapper.createInstance(e); boolean projectExists = context.getProject() != null; presentation.setVisible(true); presentation.setEnabled(projectExists); } protected String getTitle(VcsContext context) { return TITLE; } protected CvsHandler getCvsHandler(CvsContext context) { SelectCvsConfigurationDialog selectCvsConfigurationDialog = new SelectCvsConfigurationDialog(context.getProject()); selectCvsConfigurationDialog.show(); if (!selectCvsConfigurationDialog.isOK()) return CvsHandler.NULL; mySelectedConfiguration = selectCvsConfigurationDialog.getSelectedConfiguration(); return new MyCvsHandler(context.getProject()); } protected void onActionPerformed(CvsContext context, CvsTabbedWindow tabbedWindow, boolean successfully, CvsHandler handler) { if (mySelectedConfiguration == null) return; if (! 
loginImpl(context.getProject(), new ModalityContextImpl(ModalityState.NON_MODAL, false), new Consumer<VcsException>() { public void consume(VcsException e) { // } })) return; super.onActionPerformed(context, tabbedWindow, successfully, handler); if (successfully){ Project project = context.getProject(); LOG.assertTrue(project != null); LOG.assertTrue(mySelectedConfiguration != null); final BrowserPanel browserPanel = new BrowserPanel(mySelectedConfiguration, project); tabbedWindow.addTab(TITLE, browserPanel, true, true, true, true, browserPanel.getActionGroup(), "cvs.browse"); tabbedWindow.ensureVisible(project); } } private class MyCvsHandler extends AbstractCvsHandler { private final Project myProject; public MyCvsHandler(Project project) { super(TITLE, FileSetToBeUpdated.EMPTY); myProject = project; } public boolean isCanceled() { return false; } protected int getFilesToProcessCount() { return 0; } public boolean login(ModalityContext executor) throws Exception { return loginImpl(myProject, executor, new Consumer<VcsException>() { public void consume(VcsException e) { myErrors.add(e); } }); /*final LoginPerformer.MyProjectKnown performer = new LoginPerformer.MyProjectKnown(myProject, Collections.<CvsEnvironment>singletonList(mySelectedConfiguration), new Consumer<VcsException>() { public void consume(VcsException e) { myErrors.add(e); } }); return performer.loginAll(executor, false);*/ } } public boolean loginImpl(final Project project, final ModalityContext executor, final Consumer<VcsException> exceptionConsumer) { final LoginPerformer.MyProjectKnown performer = new LoginPerformer.MyProjectKnown(project, Collections.<CvsEnvironment>singletonList(mySelectedConfiguration), exceptionConsumer); return performer.loginAll(executor, false); } }
CVS: browse repository action is always disabled for default project -> do not show
plugins/cvs/cvs-plugin/src/com/intellij/cvsSupport2/actions/BrowseCvsRepositoryAction.java
CVS: browse repository action is always disabled for default project -> do not show
Java
apache-2.0
60404dd7498a316683f70f6b37e318f63e04cdd1
0
samthor/intellij-community,asedunov/intellij-community,izonder/intellij-community,kool79/intellij-community,MER-GROUP/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,apixandru/intellij-community,samthor/intellij-community,holmes/intellij-community,hurricup/intellij-community,holmes/intellij-community,holmes/intellij-community,akosyakov/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,signed/intellij-community,Lekanich/intellij-community,izonder/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,supersven/intellij-community,robovm/robovm-studio,ibinti/intellij-community,hurricup/intellij-community,xfournet/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,Distrotech/intellij-community,akosyakov/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,alphafoobar/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,supersven/intellij-community,ibinti/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,izonder/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,gnuhub/intellij-community,vladmm/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,slisson/intellij-community,petteyg/intellij-community,supersven/intellij-community,suncycheng/intellij-community,MER-GROUP/intellij-commun
ity,dslomov/intellij-community,tmpgit/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,ernestp/consulo,tmpgit/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,slisson/intellij-community,suncycheng/intellij-community,signed/intellij-community,xfournet/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,jagguli/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,blademainer/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,samthor/intellij-community,orekyuu/intellij-community,samthor/intellij-community,samthor/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,da1z/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,blademainer/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,diorcety/intellij-community,da1z/intell
ij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,holmes/intellij-community,ftomassetti/intellij-community,kdwink/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,pwoodworth/intellij-community,TangHao1987/intellij-community,signed/intellij-community,allotria/intellij-community,asedunov/intellij-community,vladmm/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,caot/intellij-community,blademainer/intellij-community,ftomassetti/intellij-community,ernestp/consulo,ahb0327/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,fengbaicanhe/intellij-community,petteyg/intellij-community,allotria/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,ibinti/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,youdonghai/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,supersven/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,MER-GROUP/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,MER-GROUP/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,semonte/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,caot/intellij-community,wreckJ/intellij-community,asedunov/intellij-community,ernestp/consulo,apixandru/intellij-community,ryano144/intellij-community,kdwink/intellij-community,ahb0327
/intellij-community,clumsy/intellij-community,signed/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,izonder/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,dslomov/intellij-community,petteyg/intellij-community,robovm/robovm-studio,asedunov/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,samthor/intellij-community,orekyuu/intellij-community,semonte/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,izonder/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,vvv1559/intellij-community,fnouama/intellij-community,vladmm/intellij-community,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,samthor/intellij-community,amith01994/intellij-community,FHannes/intellij-community,slisson/intellij-community,jagguli/intellij-community,holmes/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,caot/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,signed/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,wreckJ/intel
lij-community,ol-loginov/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,ryano144/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,ryano144/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,salguarnieri/intellij-community,petteyg/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,xfournet/intellij-community,hurricup/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,Lekanich/intellij-community,caot/intellij-community,akosyakov/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,robovm/robovm-studio,slisson/intellij-community,nicolargo/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,retomerz/intellij-community,diorcety/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,MER-GROUP/intellij-community,robovm/robovm-studio,retomerz/intellij-community,signed/intellij-community,slisson/intellij-community,semonte/intellij-community,robovm/robovm-studio,izonder/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,consulo/consulo,adedayo/intellij-community,TangHao1987/intellij-community,kool79/intellij-community,suncycheng/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,nicolargo/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,allotria/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,caot/intellij-community,semonte/intellij-community,wreckJ/in
tellij-community,pwoodworth/intellij-community,caot/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,apixandru/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,xfournet/intellij-community,semonte/intellij-community,youdonghai/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,consulo/consulo,mglukhikh/intellij-community,vvv1559/intellij-community,semonte/intellij-community,izonder/intellij-community,akosyakov/intellij-community,akosyakov/intellij-community,FHannes/intellij-community,signed/intellij-community,asedunov/intellij-community,fitermay/intellij-community,amith01994/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,semonte/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,ibinti/intellij-community,caot/intellij-community,blademainer/intellij-community,signed/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,da1z/intellij-community,SerCeMan/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community,fnouama/intellij-community,mglukhikh/intellij-community,consulo/consulo,SerCeMan/intellij-community,ernestp/consulo,kdwink/intellij-community,semonte/intellij-community,dslomov/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,kool79/intellij-community,xfournet/intellij-community,FHannes/intellij-community,ivan-fedorov/intellij-community,clumsy/intellij-community,samthor/intellij-community,fitermay/intellij-co
mmunity,ryano144/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,xfournet/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,amith01994/intellij-community,vladmm/intellij-community,kdwink/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,semonte/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,apixandru/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,asedunov/intellij-community,dslomov/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,hurricup/intellij-community,dslomov/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,ftomassetti/intellij-community,hurricup/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,pwoodworth/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,fitermay/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,adedayo/intellij-community,fitermay/intellij-community,supersven/intellij-community,vladmm/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,petteyg/intellij-community,consulo/con
sulo,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,vladmm/intellij-community,dslomov/intellij-community,adedayo/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,retomerz/intellij-community,holmes/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,da1z/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,clumsy/intellij-community,orekyuu/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,kool79/intellij-community,michaelgallacher/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,ibinti/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community,ryano144/intellij-community,samthor/intellij-community,izonder/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,mglukhikh/intellij-community,slisson/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,slisson/intellij-community,Lekanich/intellij-community,diorcety/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,FHannes/intellij-comm
unity,caot/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,fnouama/intellij-community,consulo/consulo,hurricup/intellij-community,apixandru/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,SerCeMan/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,holmes/intellij-community,robovm/robovm-studio,semonte/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,slisson/intellij-community,kool79/intellij-community,MER-GROUP/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,jagguli/intellij-community,asedunov/intellij-community,adedayo/intellij-community,apixandru/intellij-community,holmes/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,akosyakov/intellij-community,fitermay/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,diorcety/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,signed/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,retomerz/intellij-community,ernestp/consulo,lucafavatella/intellij-community,adedayo/intellij-community,FHannes/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,consulo/consulo,salguarnieri/intellij-community,robovm/robovm-studio,supersven/intellij-community,MER-GROUP/intellij
-community,fnouama/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,muntasirsyed/intellij-community,ibinti/intellij-community,fnouama/intellij-community,fnouama/intellij-community,amith01994/intellij-community,Lekanich/intellij-community,supersven/intellij-community,slisson/intellij-community,SerCeMan/intellij-community,retomerz/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,orekyuu/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,caot/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,holmes/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,asedunov/intellij-community,fnouama/intellij-community,Distrotech/intellij-community,samthor/intellij-community,holmes/intellij-community,ernestp/consulo,kool79/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,caot/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,clums
y/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,signed/intellij-community,orekyuu/intellij-community,allotria/intellij-community,suncycheng/intellij-community,vladmm/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,supersven/intellij-community,da1z/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,da1z/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,alphafoobar/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,caot/intellij-community,kdwink/intellij-community,SerCeMan/intellij-community,asedunov/intellij-community,da1z/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,clumsy/intellij-community,gnuhub/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,da1z/intellij-community,retomerz/intellij-community
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.completion;

import com.intellij.codeInsight.completion.impl.CompletionServiceImpl;
import com.intellij.codeInsight.hint.EditorHintListener;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.lookup.*;
import com.intellij.codeInsight.lookup.impl.LookupImpl;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.patterns.ElementPattern;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiReference;
import com.intellij.psi.ReferenceRange;
import com.intellij.psi.util.PsiUtilBase;
import com.intellij.ui.LightweightHint;
import com.intellij.util.ObjectUtils;
import com.intellij.util.concurrency.Semaphore;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;

import javax.swing.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

/**
 * Progress indicator that drives a single code-completion session.
 * <p>
 * It owns the {@link LookupImpl} being populated, batches UI refreshes through a
 * {@link MergingUpdateQueue}, and coordinates session lifecycle with
 * {@link CompletionServiceImpl}'s {@link CompletionPhase} state machine (registering
 * itself as the current completion on construction and asserting/advancing phases as
 * it finishes, restarts, or is cancelled).
 * <p>
 * Threading (as visible in this class): construction, {@link #updateLookup()},
 * {@link #finishCompletionProcess()} and {@link #scheduleRestart()} assert the EDT;
 * {@link #addItem} asserts it is NOT on the EDT outside unit-test mode; advertising
 * text is computed on a pooled thread inside read actions.
 *
 * @author peter
 */
public class CompletionProgressIndicator extends ProgressIndicatorBase implements CompletionProcess{
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.completion.CompletionProgressIndicator");
  private final Editor myEditor;
  private final CompletionParameters myParameters;
  private final CodeCompletionHandlerBase myHandler;
  private final LookupImpl myLookup;
  // Coalesces lookup refreshes; initial merge span is set in the constructor (200ms).
  private final MergingUpdateQueue myQueue;
  // Queued from addItem(); after the first refresh, widens the merge span to 100ms.
  private final Update myUpdate = new Update("update") {
    public void run() {
      updateLookup();
      myQueue.setMergingTimeSpan(100);
    }
  };
  // Released ("up") either 300ms after the first item arrives or in stop();
  // presumably the caller blocks on it to decide between sync/async UI — confirm at call site.
  private final Semaphore myFreezeSemaphore;
  private final OffsetMap myOffsetMap;
  // (startOffset, pattern) pairs registered via addWatchedPrefix(); checked in prefixUpdated()
  // to decide whether the autopopup session must restart. COW list: mutated on one thread,
  // iterated possibly on another.
  private final CopyOnWriteArrayList<Pair<Integer, ElementPattern<String>>> myRestartingPrefixConditions = ContainerUtil.createEmptyCOWList();
  private final LookupAdapter myLookupListener = new LookupAdapter() {
    public void itemSelected(LookupEvent event) {
      // Wait until duringCompletion() has finished before committing the selection.
      ensureDuringCompletionPassed();
      finishCompletionProcess();

      LookupElement item = event.getItem();
      if (item == null) return;

      setMergeCommand();

      // Recompute the start offset from the caret minus the chosen lookup string's length.
      myOffsetMap.addOffset(CompletionInitializationContext.START_OFFSET,
                            myEditor.getCaretModel().getOffset() - item.getLookupString().length());
      CodeCompletionHandlerBase.selectLookupItem(item, event.getCompletionChar(), CompletionProgressIndicator.this,
                                                 myLookup.getItems());
    }

    public void lookupCanceled(final LookupEvent event) {
      finishCompletionProcess();
    }
  };
  // Lowered in the constructor, raised in duringCompletionPassed(); gates itemSelected above.
  private final Semaphore myDuringCompletionSemaphore = new Semaphore();
  // Number of items added so far; volatile because addItem (background) and stop's
  // invokeLater runnable (EDT) both read it.
  private volatile int myCount;

  /**
   * Creates and registers the indicator for one completion session.
   * Must run on the EDT (asserted below). Lowers {@link #myDuringCompletionSemaphore}
   * so that item selection blocks until {@link #duringCompletion} completes.
   */
  public CompletionProgressIndicator(final Editor editor, CompletionParameters parameters, CodeCompletionHandlerBase handler,
                                     Semaphore freezeSemaphore, final OffsetMap offsetMap, LookupImpl lookup) {
    myEditor = editor;
    myParameters = parameters;
    myHandler = handler;
    myFreezeSemaphore = freezeSemaphore;
    myOffsetMap = offsetMap;
    myLookup = lookup;

    myLookup.setArranger(new CompletionLookupArranger(parameters));

    myLookup.addLookupListener(myLookupListener);
    myLookup.setCalculating(true);

    myQueue = new MergingUpdateQueue("completion lookup progress", 200, true, myEditor.getContentComponent());

    ApplicationManager.getApplication().assertIsDispatchThread();
    registerItself();

    // Advertising is skipped in unit tests and when the lookup is already showing.
    if (!ApplicationManager.getApplication().isUnitTestMode() && !lookup.isShown()) {
      scheduleAdvertising();
    }

    trackModifiers();
    myDuringCompletionSemaphore.down();
  }

  public OffsetMap getOffsetMap() {
    return myOffsetMap;
  }

  public int getSelectionEndOffset() {
    return getOffsetMap().getOffset(CompletionInitializationContext.SELECTION_END_OFFSET);
  }

  /**
   * Runs contributor-side preparation for this session: fixes up the replacement offset
   * from the reference at the caret (unless already modified) and invokes
   * {@link CompletionContributor#duringCompletion} for each applicable contributor,
   * skipping non-dumb-aware contributors while indexing ("dumb mode").
   * Always raises the during-completion semaphore in the finally block, even when cancelled.
   */
  void duringCompletion(CompletionInitializationContext initContext) {
    try {
      ProgressManager.checkCanceled();

      if (!initContext.getOffsetMap().wasModified(CompletionInitializationContext.IDENTIFIER_END_OFFSET)) {
        try {
          final int selectionEndOffset = initContext.getSelectionEndOffset();
          final PsiReference reference = initContext.getFile().findReferenceAt(selectionEndOffset);
          if (reference != null) {
            initContext.setReplacementOffset(findReplacementOffset(selectionEndOffset, reference));
          }
        }
        catch (IndexNotReadyException ignored) {
          // Indexes unavailable: silently keep the default replacement offset.
        }
      }

      for (CompletionContributor contributor : CompletionContributor.forLanguage(initContext.getPositionLanguage())) {
        if (DumbService.getInstance(initContext.getProject()).isDumb() && !DumbService.isDumbAware(contributor)) {
          continue;
        }
        contributor.duringCompletion(initContext);
      }
    }
    catch (ProcessCanceledException ignored) {
      // Cancellation is a normal way to end this work; semaphore is still released below.
    }
    finally {
      duringCompletionPassed();
    }
  }

  void duringCompletionPassed() {
    myDuringCompletionSemaphore.up();
  }

  void ensureDuringCompletionPassed() {
    myDuringCompletionSemaphore.waitFor();
  }

  /**
   * Autopopup-only (asserted). Either marks the current BgCalculation phase to focus the
   * lookup when items are ready, or advertises the shortcut for choosing the first suggestion.
   */
  public void setFocusLookupWhenDone(boolean focusLookup) {
    LOG.assertTrue(isAutopopupCompletion());
    if (focusLookup) {
      ((CompletionPhase.BgCalculation)CompletionServiceImpl.getCompletionPhase()).focusLookupWhenDone = true;
    }
    else {
      myLookup.setAdvertisementText("Press " + CompletionContributor.getActionShortcut(IdeActions.ACTION_CHOOSE_LOOKUP_ITEM_REPLACE) +
                                    " to choose the first suggestion");
    }
  }

  /**
   * Returns the offset completion should replace up to: the end of the reference range
   * containing the selection end, or the absolute end offset of the reference's in-element
   * range when no range contains it.
   */
  private static int findReplacementOffset(int selectionEndOffset, PsiReference reference) {
    final List<TextRange> ranges = ReferenceRange.getAbsoluteRanges(reference);
    for (TextRange range : ranges) {
      if (range.contains(selectionEndOffset)) {
        return range.getEndOffset();
      }
    }

    return reference.getElement().getTextRange().getStartOffset() + reference.getRangeInElement().getEndOffset();
  }

  /**
   * On a pooled thread, asks each contributor (inside read actions) for advertisement text
   * and installs the first non-null answer, then refreshes the lookup on the EDT.
   * Bails out at multiple points when this indicator is no longer the current session,
   * when text was already set, or when the lookup is done calculating and not visible.
   */
  private void scheduleAdvertising() {
    ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
      public void run() {
        if (isOutdated()) return; //tests?
        final List<CompletionContributor> list =
          ApplicationManager.getApplication().runReadAction(new Computable<List<CompletionContributor>>() {
            public List<CompletionContributor> compute() {
              if (isOutdated()) {
                return Collections.emptyList();
              }

              return CompletionContributor.forParameters(myParameters);
            }
          });
        for (final CompletionContributor contributor : list) {
          if (myLookup.getAdvertisementText() != null) return;
          if (!myLookup.isCalculating() && !myLookup.isVisible()) return;

          String s = ApplicationManager.getApplication().runReadAction(new Computable<String>() {
            @Nullable
            public String compute() {
              if (isOutdated()) {
                return null;
              }

              return contributor.advertise(myParameters);
            }
          });
          if (myLookup.getAdvertisementText() != null) return;

          if (s != null) {
            myLookup.setAdvertisementText(s);
            ApplicationManager.getApplication().invokeLater(new Runnable() {
              public void run() {
                // Only refresh if an autopopup lookup is shown, the phase is still
                // BgCalculation/ItemsCalculated, and this indicator is still current.
                if (isAutopopupCompletion() && !myLookup.isShown()) {
                  return;
                }
                if (!CompletionServiceImpl.isPhase(CompletionPhase.BgCalculation.class, CompletionPhase.ItemsCalculated.class)) {
                  return;
                }
                if (CompletionServiceImpl.getCompletionPhase().indicator != CompletionProgressIndicator.this) {
                  return;
                }

                updateLookup();
              }
            }, myQueue.getModalityState());
            return;
          }
        }
      }
    });
  }

  // True when another completion session (or none) has replaced this one as the phase's indicator.
  private boolean isOutdated() {
    return CompletionServiceImpl.getCompletionPhase().indicator != this;
  }

  /**
   * For explicit (non-autopopup) completion, watches for the first modifier key press/release
   * (Ctrl/Meta/Alt/Shift) on the editor component. When one occurs it unhooks itself and
   * records the change on a BgCalculation phase, or resets a zombie phase to NoCompletion.
   */
  private void trackModifiers() {
    if (isAutopopupCompletion()) {
      return;
    }

    final JComponent contentComponent = myEditor.getContentComponent();
    contentComponent.addKeyListener(new KeyAdapter() {
      public void keyPressed(KeyEvent e) {
        processModifier(e);
      }

      public void keyReleased(KeyEvent e) {
        processModifier(e);
      }

      private void processModifier(KeyEvent e) {
        final int code = e.getKeyCode();
        if (code == KeyEvent.VK_CONTROL || code == KeyEvent.VK_META || code == KeyEvent.VK_ALT || code == KeyEvent.VK_SHIFT) {
          contentComponent.removeKeyListener(this);  // one-shot listener
          final CompletionPhase phase = CompletionServiceImpl.getCompletionPhase();
          if (phase instanceof CompletionPhase.BgCalculation) {
            ((CompletionPhase.BgCalculation)phase).modifiersChanged = true;
          }
          else if (phase instanceof CompletionPhase.ZombiePhase) {
            CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion);
          }
        }
      }
    });
  }

  // Groups document commands from this session under one undo group id.
  private void setMergeCommand() {
    CommandProcessor.getInstance().setCurrentCommandGroupId(getCompletionCommandName());
  }

  // Per-instance command group name; uses identity hashCode to keep sessions distinct.
  private String getCompletionCommandName() {
    return "Completion" + hashCode();
  }

  public void showLookup() {
    updateLookup();
  }

  public CompletionParameters getParameters() {
    return myParameters;
  }

  private void registerItself() {
    CompletionServiceImpl.getCompletionService().setCurrentCompletion(this);
  }

  public CodeCompletionHandlerBase getHandler() {
    return myHandler;
  }

  public LookupImpl getLookup() {
    return myLookup;
  }

  /**
   * EDT-only. Shows the lookup if not yet shown (possibly installing default advertisement
   * text for explicit completion), refreshes its UI, hides a meaningless autopopup, and
   * recomputes focus. No-op when this session is outdated.
   */
  private void updateLookup() {
    ApplicationManager.getApplication().assertIsDispatchThread();
    if (isOutdated()) return;

    if (!myLookup.isShown()) {
      if (hideAutopopupIfMeaningless()) {
        return;
      }

      if (StringUtil.isEmpty(myLookup.getAdvertisementText()) && !isAutopopupCompletion()) {
        final String text = DefaultCompletionContributor.getDefaultAdvertisementText(myParameters);
        if (text != null) {
          myLookup.setAdvertisementText(text);
        }
      }

      myLookup.show();
    }
    myLookup.refreshUi();
    hideAutopopupIfMeaningless();
    updateFocus();
  }

  // Caret is "inside" an identifier when the identifier extends past the selection end.
  final boolean isInsideIdentifier() {
    return getIdentifierEndOffset() != getSelectionEndOffset();
  }

  public int getIdentifierEndOffset() {
    return myOffsetMap.getOffset(CompletionInitializationContext.IDENTIFIER_END_OFFSET);
  }

  /**
   * Adds one element to the lookup from the computation thread (must NOT be the EDT
   * outside unit tests). On the first item, starts a pooled task that releases
   * {@link #myFreezeSemaphore} after a 300ms delay; every item queues a merged UI update.
   * Synchronized — presumably to serialize concurrent contributor threads; confirm callers.
   */
  public synchronized void addItem(final LookupElement item) {
    if (!isRunning()) return;
    ProgressManager.checkCanceled();

    final boolean unitTestMode = ApplicationManager.getApplication().isUnitTestMode();
    if (!unitTestMode) {
      LOG.assertTrue(!ApplicationManager.getApplication().isDispatchThread());
    }

    myLookup.addItem(item);
    myCount++;
    if (unitTestMode) return;

    if (myCount == 1) {
      ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
        public void run() {
          try {
            Thread.sleep(300);  // let more items accumulate before unfreezing
          }
          catch (InterruptedException e) {
            LOG.error(e);
          }
          myFreezeSemaphore.up();
        }
      });
    }
    myQueue.queue(myUpdate);
  }

  /**
   * Tears down the session: verifies this is the current completion and active lookup,
   * detaches the lookup listener, finishes the process (phase -> NoCompletion), and
   * optionally hides the active lookup.
   */
  public void closeAndFinish(boolean hideLookup) {
    final CompletionProgressIndicator current = CompletionServiceImpl.getCompletionService().getCurrentCompletion();
    LOG.assertTrue(this == current, current);

    Lookup lookup = LookupManager.getActiveLookup(myEditor);
    LOG.assertTrue(lookup == myLookup, lookup);
    myLookup.removeLookupListener(myLookupListener);
    finishCompletionProcess();
    CompletionServiceImpl.assertPhase(CompletionPhase.NoCompletion.getClass());

    if (hideLookup) {
      LookupManager.getInstance(getProject()).hideActiveLookup();
    }
  }

  /**
   * EDT-only. Cancels this indicator, disposes the update queue, unregisters this session,
   * and moves the phase to NoCompletion (asserting it was one of the in-flight phases).
   */
  private void finishCompletionProcess() {
    cancel();

    ApplicationManager.getApplication().assertIsDispatchThread();
    Disposer.dispose(myQueue);

    CompletionProgressIndicator currentCompletion = CompletionServiceImpl.getCompletionService().getCurrentCompletion();
    assert currentCompletion == this : currentCompletion + "!=" + this;
    CompletionServiceImpl.getCompletionService().setCurrentCompletion(null);

    CompletionServiceImpl.assertPhase(CompletionPhase.BgCalculation.class, CompletionPhase.ItemsCalculated.class,
                                      CompletionPhase.Synchronous.class, CompletionPhase.Restarted.class);
    CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion);
  }

  /** Test hook: finishes any dangling completion session between tests. */
  @TestOnly
  public static void cleanupForNextTest() {
    CompletionProgressIndicator currentCompletion = CompletionServiceImpl.getCompletionService().getCurrentCompletion();
    if (currentCompletion != null) {
      currentCompletion.finishCompletionProcess();
    }
  }

  /**
   * Called when item calculation completes (or the indicator is stopped). Releases the
   * freeze semaphore, then on the EDT — if the phase is still this indicator's
   * BgCalculation — either handles an empty lookup (explicit completion with zero items)
   * or advances to ItemsCalculated and refreshes focus/UI.
   */
  public void stop() {
    super.stop();

    myQueue.cancelAllUpdates();
    myFreezeSemaphore.up();

    ApplicationManager.getApplication().invokeLater(new Runnable() {
      public void run() {
        final CompletionPhase phase = CompletionServiceImpl.getCompletionPhase();
        if (!(phase instanceof CompletionPhase.BgCalculation) || phase.indicator != CompletionProgressIndicator.this) return;

        myLookup.setCalculating(false);

        if (hideAutopopupIfMeaningless()) {
          return;
        }

        if (myCount == 0) {
          if (!isAutopopupCompletion()) {
            LookupManager.getInstance(getProject()).hideActiveLookup();

            final CompletionProgressIndicator current = CompletionServiceImpl.getCompletionService().getCurrentCompletion();
            LOG.assertTrue(current == null, current + "!=" + CompletionProgressIndicator.this);

            // Don't await a second invocation if the user already changed modifiers.
            handleEmptyLookup(!((CompletionPhase.BgCalculation)phase).modifiersChanged);
          }
        }
        else {
          CompletionServiceImpl.setCompletionPhase(new CompletionPhase.ItemsCalculated(CompletionProgressIndicator.this,
                                                                                       ((CompletionPhase.BgCalculation)phase).focusLookupWhenDone));
          updateFocus();
          updateLookup();
        }
      }
    }, myQueue.getModalityState());
  }

  /**
   * Hides an autopopup lookup whose every item exactly equals the typed prefix (+ additional
   * prefix) — i.e. it offers nothing new — and moves to the EmptyAutoPopup phase.
   *
   * @return true when the lookup was hidden
   */
  private boolean hideAutopopupIfMeaningless() {
    if (isAutopopupCompletion() && !myLookup.isSelectionTouched() && !myLookup.isCalculating()) {
      myLookup.refreshUi();
      for (LookupElement item : myLookup.getItems()) {
        if (!(item.getPrefixMatcher().getPrefix() + myLookup.getAdditionalPrefix()).equals(item.getLookupString())) {
          return false;
        }
      }
      myLookup.hideLookup(false);
      LOG.assertTrue(CompletionServiceImpl.getCompletionService().getCurrentCompletion() == null);
      CompletionServiceImpl.setCompletionPhase(new CompletionPhase.EmptyAutoPopup(this));
      return true;
    }
    return false;
  }

  /**
   * Recomputes lookup focus: explicit completion is always focused; an autopopup is
   * unfocused when some item already equals the typed prefix, otherwise focused only if
   * the ItemsCalculated phase requested it. Never changes focus after the user touched
   * the selection.
   */
  private void updateFocus() {
    if (myLookup.isSelectionTouched()) {
      return;
    }

    if (!isAutopopupCompletion()) {
      myLookup.setFocused(true);
      return;
    }

    for (LookupElement item : myLookup.getItems()) {
      if ((item.getPrefixMatcher().getPrefix() + myLookup.getAdditionalPrefix()).equals(item.getLookupString())) {
        myLookup.setFocused(false);
        return;
      }
    }

    final CompletionPhase phase = CompletionServiceImpl.getCompletionPhase();
    myLookup.setFocused(phase instanceof CompletionPhase.ItemsCalculated && ((CompletionPhase.ItemsCalculated)phase).focusLookup);
  }

  /**
   * Inserts the common prefix of all lookup items via a write command (merged with the
   * session's undo group when implicit). Returns false without writing when the caret is
   * inside an identifier.
   */
  public boolean fillInCommonPrefix(final boolean explicit) {
    if (isInsideIdentifier()) {
      return false;
    }

    final Boolean aBoolean = new WriteCommandAction<Boolean>(getProject()) {
      protected void run(Result<Boolean> result) throws Throwable {
        if (!explicit) {
          setMergeCommand();
        }
        try {
          result.setResult(myLookup.fillInCommonPrefix(explicit));
        }
        catch (Exception e) {
          LOG.error(e);
        }
      }
    }.execute().getResultObject();
    return aBoolean.booleanValue();
  }

  /**
   * Restores the pre-completion document text inside a write command: re-runs the zombie
   * phase's stored restore action (for an inserted single item) and the lookup's own
   * prefix restoration.
   */
  public void restorePrefix(final CompletionPhase zombie) {
    new WriteCommandAction(getProject(), getCompletionCommandName()) {
      @Override
      protected void run(Result result) throws Throwable {
        setMergeCommand();

        if (zombie instanceof CompletionPhase.InsertedSingleItem) {
          ((CompletionPhase.InsertedSingleItem)zombie).restorePrefix.run();
        }
        getLookup().restorePrefix();
      }
    }.execute();
  }

  public Editor getEditor() {
    return myEditor;
  }

  /**
   * True when a new invocation with the given type/editor should count as a repeat of this
   * session. Autopopup sessions whose lookup may not have been noticed don't count.
   */
  public boolean isRepeatedInvocation(CompletionType completionType, Editor editor) {
    if (completionType != myParameters.getCompletionType() || editor != myEditor) {
      return false;
    }

    if (isAutopopupCompletion() && !myLookup.mayBeNoticed()) {
      return false;
    }

    return true;
  }

  @Override
  public boolean isAutopopupCompletion() {
    return myHandler.autopopup;
  }

  @NotNull
  public Project getProject() {
    return ObjectUtils.assertNotNull(myEditor.getProject());
  }

  /** Autopopup-only: registers a prefix-watch condition checked by {@link #prefixUpdated()}. */
  public void addWatchedPrefix(int startOffset, ElementPattern<String> restartCondition) {
    if (isAutopopupCompletion()) {
      myRestartingPrefixConditions.add(Pair.create(startOffset, restartCondition));
    }
  }

  /**
   * Called when the typed prefix changes: restarts the session if any watched condition
   * accepts the new prefix text (from its start offset to the caret); otherwise re-evaluates
   * autopopup meaningfulness and focus.
   */
  public void prefixUpdated() {
    final CharSequence text = myEditor.getDocument().getCharsSequence();
    final int caretOffset = myEditor.getCaretModel().getOffset();
    for (Pair<Integer, ElementPattern<String>> pair : myRestartingPrefixConditions) {
      final String newPrefix = text.subSequence(pair.first, caretOffset).toString();
      if (pair.second.accepts(newPrefix)) {
        scheduleRestart();
        myRestartingPrefixConditions.clear();
        return;
      }
    }

    hideAutopopupIfMeaningless();
    updateFocus();
  }

  /**
   * EDT-only. Cancels this session, enters the Restarted phase, and — if the phase is
   * still current when the invokeLater runs — closes this session and invokes a fresh
   * completion handler with the same type/invocation count.
   */
  public void scheduleRestart() {
    cancel();

    ApplicationManager.getApplication().assertIsDispatchThread();
    final CompletionPhase phase = new CompletionPhase.Restarted(this);
    CompletionServiceImpl.setCompletionPhase(phase);

    final Project project = getProject();
    ApplicationManager.getApplication().invokeLater(new Runnable() {
      @Override
      public void run() {
        if (phase != CompletionServiceImpl.getCompletionPhase()) {
          return;
        }

        closeAndFinish(false);

        final CodeCompletionHandlerBase newHandler =
          new CodeCompletionHandlerBase(myParameters.getCompletionType(), false, isAutopopupCompletion());
        final PsiFile psiFileInEditor = PsiUtilBase.getPsiFileInEditor(myEditor, project);
        try {
          newHandler.invokeCompletion(project, myEditor, psiFileInEditor, myParameters.getInvocationCount());
        }
        catch (IndexNotReadyException ignored) {
          // Restart attempted during indexing: drop it silently.
        }
      }
    });
  }

  @Override
  public String toString() {
    return "CompletionProgressIndicator[count=" + myCount + ",phase=" + CompletionServiceImpl.getCompletionPhase() + "]";
  }

  /**
   * Explicit-completion path for "no suggestions" (asserted non-autopopup). Outside unit
   * tests / implicit invocations, asks contributors for an empty-lookup message, shows the
   * first non-empty one as an error hint, and enters either NoSuggestionsHint (when a second
   * invocation should be awaited) or NoCompletion.
   */
  protected void handleEmptyLookup(final boolean awaitSecondInvocation) {
    assert !isAutopopupCompletion();

    if (ApplicationManager.getApplication().isUnitTestMode() || !myHandler.invokedExplicitly) {
      CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion);
      return;
    }

    for (final CompletionContributor contributor : CompletionContributor.forParameters(getParameters())) {
      final String text = contributor.handleEmptyLookup(getParameters(), getEditor());
      if (StringUtil.isNotEmpty(text)) {
        LightweightHint hint = showErrorHint(getProject(), getEditor(), text);
        CompletionServiceImpl.setCompletionPhase(
          awaitSecondInvocation ? new CompletionPhase.NoSuggestionsHint(hint, this) : CompletionPhase.NoCompletion);
        return;
      }
    }
    CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion);
  }

  /**
   * Shows an error hint in the editor and returns the LightweightHint instance, captured
   * by temporarily subscribing to {@link EditorHintListener#TOPIC} around the show call.
   */
  private static LightweightHint showErrorHint(Project project, Editor editor, String text) {
    final LightweightHint[] result = {null};

    final EditorHintListener listener = new EditorHintListener() {
      public void hintShown(final Project project, final LightweightHint hint, final int flags) {
        result[0] = hint;
      }
    };
    final MessageBusConnection connection = project.getMessageBus().connect();
    connection.subscribe(EditorHintListener.TOPIC, listener);
    assert text != null;
    HintManager.getInstance().showErrorHint(editor, text);
    connection.disconnect();
    return result[0];
  }
}
platform/lang-impl/src/com/intellij/codeInsight/completion/CompletionProgressIndicator.java
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.completion; import com.intellij.codeInsight.completion.impl.CompletionServiceImpl; import com.intellij.codeInsight.hint.EditorHintListener; import com.intellij.codeInsight.hint.HintManager; import com.intellij.codeInsight.lookup.*; import com.intellij.codeInsight.lookup.impl.LookupImpl; import com.intellij.openapi.actionSystem.IdeActions; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.Result; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.command.WriteCommandAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.util.ProgressIndicatorBase; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.IndexNotReadyException; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.text.StringUtil; import com.intellij.patterns.ElementPattern; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiReference; import com.intellij.psi.ReferenceRange; import 
com.intellij.psi.util.PsiUtilBase; import com.intellij.ui.LightweightHint; import com.intellij.util.ObjectUtils; import com.intellij.util.concurrency.Semaphore; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.messages.MessageBusConnection; import com.intellij.util.ui.update.MergingUpdateQueue; import com.intellij.util.ui.update.Update; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import javax.swing.*; import java.awt.event.KeyAdapter; import java.awt.event.KeyEvent; import java.util.Collections; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; /** * @author peter */ public class CompletionProgressIndicator extends ProgressIndicatorBase implements CompletionProcess{ private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.completion.CompletionProgressIndicator"); private final Editor myEditor; private final CompletionParameters myParameters; private final CodeCompletionHandlerBase myHandler; private final LookupImpl myLookup; private final MergingUpdateQueue myQueue; private final Update myUpdate = new Update("update") { public void run() { updateLookup(); myQueue.setMergingTimeSpan(100); } }; private final Semaphore myFreezeSemaphore; private final OffsetMap myOffsetMap; private final CopyOnWriteArrayList<Pair<Integer, ElementPattern<String>>> myRestartingPrefixConditions = ContainerUtil.createEmptyCOWList(); private final LookupAdapter myLookupListener = new LookupAdapter() { public void itemSelected(LookupEvent event) { ensureDuringCompletionPassed(); finishCompletionProcess(); LookupElement item = event.getItem(); if (item == null) return; setMergeCommand(); myOffsetMap.addOffset(CompletionInitializationContext.START_OFFSET, myEditor.getCaretModel().getOffset() - item.getLookupString().length()); CodeCompletionHandlerBase.selectLookupItem(item, event.getCompletionChar(), CompletionProgressIndicator.this, 
myLookup.getItems()); } public void lookupCanceled(final LookupEvent event) { finishCompletionProcess(); } }; private final Semaphore myDuringCompletionSemaphore = new Semaphore(); private volatile int myCount; public CompletionProgressIndicator(final Editor editor, CompletionParameters parameters, CodeCompletionHandlerBase handler, Semaphore freezeSemaphore, final OffsetMap offsetMap, LookupImpl lookup) { myEditor = editor; myParameters = parameters; myHandler = handler; myFreezeSemaphore = freezeSemaphore; myOffsetMap = offsetMap; myLookup = lookup; myLookup.setArranger(new CompletionLookupArranger(parameters)); myLookup.addLookupListener(myLookupListener); myLookup.setCalculating(true); myQueue = new MergingUpdateQueue("completion lookup progress", 200, true, myEditor.getContentComponent()); ApplicationManager.getApplication().assertIsDispatchThread(); registerItself(); if (!ApplicationManager.getApplication().isUnitTestMode() && !lookup.isShown()) { scheduleAdvertising(); } trackModifiers(); myDuringCompletionSemaphore.down(); } public OffsetMap getOffsetMap() { return myOffsetMap; } public int getSelectionEndOffset() { return getOffsetMap().getOffset(CompletionInitializationContext.SELECTION_END_OFFSET); } void duringCompletion(CompletionInitializationContext initContext) { try { ProgressManager.checkCanceled(); if (!initContext.getOffsetMap().wasModified(CompletionInitializationContext.IDENTIFIER_END_OFFSET)) { try { final int selectionEndOffset = initContext.getSelectionEndOffset(); final PsiReference reference = initContext.getFile().findReferenceAt(selectionEndOffset); if (reference != null) { initContext.setReplacementOffset(findReplacementOffset(selectionEndOffset, reference)); } } catch (IndexNotReadyException ignored) { } } for (CompletionContributor contributor : CompletionContributor.forLanguage(initContext.getPositionLanguage())) { if (DumbService.getInstance(initContext.getProject()).isDumb() && !DumbService.isDumbAware(contributor)) { continue; } 
contributor.duringCompletion(initContext); } } catch (ProcessCanceledException ignored) { } finally { duringCompletionPassed(); } } void duringCompletionPassed() { myDuringCompletionSemaphore.up(); } void ensureDuringCompletionPassed() { myDuringCompletionSemaphore.waitFor(); } public void setFocusLookupWhenDone(boolean focusLookup) { LOG.assertTrue(isAutopopupCompletion()); if (focusLookup) { ((CompletionPhase.BgCalculation)CompletionServiceImpl.getCompletionPhase()).focusLookupWhenDone = true; } else { myLookup.setAdvertisementText("Press " + CompletionContributor.getActionShortcut(IdeActions.ACTION_CHOOSE_LOOKUP_ITEM_REPLACE) + " to choose the first suggestion"); } } private static int findReplacementOffset(int selectionEndOffset, PsiReference reference) { final List<TextRange> ranges = ReferenceRange.getAbsoluteRanges(reference); for (TextRange range : ranges) { if (range.contains(selectionEndOffset)) { return range.getEndOffset(); } } return reference.getElement().getTextRange().getStartOffset() + reference.getRangeInElement().getEndOffset(); } private void scheduleAdvertising() { ApplicationManager.getApplication().executeOnPooledThread(new Runnable() { public void run() { if (isOutdated()) return; //tests? 
final List<CompletionContributor> list = ApplicationManager.getApplication().runReadAction(new Computable<List<CompletionContributor>>() { public List<CompletionContributor> compute() { if (isOutdated()) { return Collections.emptyList(); } return CompletionContributor.forParameters(myParameters); } }); for (final CompletionContributor contributor : list) { if (myLookup.getAdvertisementText() != null) return; if (!myLookup.isCalculating() && !myLookup.isVisible()) return; String s = ApplicationManager.getApplication().runReadAction(new Computable<String>() { @Nullable public String compute() { if (isOutdated()) { return null; } return contributor.advertise(myParameters); } }); if (myLookup.getAdvertisementText() != null) return; if (s != null) { myLookup.setAdvertisementText(s); ApplicationManager.getApplication().invokeLater(new Runnable() { public void run() { if (isAutopopupCompletion() && !myLookup.isShown()) { return; } if (!CompletionServiceImpl.isPhase(CompletionPhase.BgCalculation.class, CompletionPhase.ItemsCalculated.class)) { return; } if (CompletionServiceImpl.getCompletionPhase().indicator != CompletionProgressIndicator.this) { return; } updateLookup(); } }, myQueue.getModalityState()); return; } } } }); } private boolean isOutdated() { return CompletionServiceImpl.getCompletionPhase().indicator != this; } private void trackModifiers() { if (isAutopopupCompletion()) { return; } final JComponent contentComponent = myEditor.getContentComponent(); contentComponent.addKeyListener(new KeyAdapter() { public void keyPressed(KeyEvent e) { processModifier(e); } public void keyReleased(KeyEvent e) { processModifier(e); } private void processModifier(KeyEvent e) { final int code = e.getKeyCode(); if (code == KeyEvent.VK_CONTROL || code == KeyEvent.VK_META || code == KeyEvent.VK_ALT || code == KeyEvent.VK_SHIFT) { contentComponent.removeKeyListener(this); final CompletionPhase phase = CompletionServiceImpl.getCompletionPhase(); if (phase instanceof 
CompletionPhase.BgCalculation) { ((CompletionPhase.BgCalculation)phase).modifiersChanged = true; } else if (phase instanceof CompletionPhase.ZombiePhase) { CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion); } } } }); } private void setMergeCommand() { CommandProcessor.getInstance().setCurrentCommandGroupId(getCompletionCommandName()); } private String getCompletionCommandName() { return "Completion" + hashCode(); } public void showLookup() { updateLookup(); } public CompletionParameters getParameters() { return myParameters; } private void registerItself() { CompletionServiceImpl.getCompletionService().setCurrentCompletion(this); } public CodeCompletionHandlerBase getHandler() { return myHandler; } public LookupImpl getLookup() { return myLookup; } private void updateLookup() { ApplicationManager.getApplication().assertIsDispatchThread(); if (isOutdated()) return; if (!myLookup.isShown()) { if (hideAutopopupIfMeaningless()) { return; } if (StringUtil.isEmpty(myLookup.getAdvertisementText()) && !isAutopopupCompletion()) { final String text = DefaultCompletionContributor.getDefaultAdvertisementText(myParameters); if (text != null) { myLookup.setAdvertisementText(text); } } myLookup.show(); } myLookup.refreshUi(); hideAutopopupIfMeaningless(); updateFocus(); } final boolean isInsideIdentifier() { return getIdentifierEndOffset() != getSelectionEndOffset(); } public int getIdentifierEndOffset() { return myOffsetMap.getOffset(CompletionInitializationContext.IDENTIFIER_END_OFFSET); } public synchronized void addItem(final LookupElement item) { if (!isRunning()) return; ProgressManager.checkCanceled(); final boolean unitTestMode = ApplicationManager.getApplication().isUnitTestMode(); if (!unitTestMode) { LOG.assertTrue(!ApplicationManager.getApplication().isDispatchThread()); } myLookup.addItem(item); myCount++; if (unitTestMode) return; if (myCount == 1) { ApplicationManager.getApplication().executeOnPooledThread(new Runnable() { public void run() { 
try { Thread.sleep(300); } catch (InterruptedException e) { LOG.error(e); } myFreezeSemaphore.up(); } }); } myQueue.queue(myUpdate); } public void closeAndFinish(boolean hideLookup) { LOG.assertTrue(this == CompletionServiceImpl.getCompletionService().getCurrentCompletion()); Lookup lookup = LookupManager.getActiveLookup(myEditor); LOG.assertTrue(lookup == myLookup); myLookup.removeLookupListener(myLookupListener); finishCompletionProcess(); CompletionServiceImpl.assertPhase(CompletionPhase.NoCompletion.getClass()); if (hideLookup) { LookupManager.getInstance(getProject()).hideActiveLookup(); } } private void finishCompletionProcess() { cancel(); ApplicationManager.getApplication().assertIsDispatchThread(); Disposer.dispose(myQueue); CompletionProgressIndicator currentCompletion = CompletionServiceImpl.getCompletionService().getCurrentCompletion(); assert currentCompletion == this : currentCompletion + "!=" + this; CompletionServiceImpl.getCompletionService().setCurrentCompletion(null); CompletionServiceImpl.assertPhase(CompletionPhase.BgCalculation.class, CompletionPhase.ItemsCalculated.class, CompletionPhase.Synchronous.class, CompletionPhase.Restarted.class); CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion); } @TestOnly public static void cleanupForNextTest() { CompletionProgressIndicator currentCompletion = CompletionServiceImpl.getCompletionService().getCurrentCompletion(); if (currentCompletion != null) { currentCompletion.finishCompletionProcess(); } } public void stop() { super.stop(); myQueue.cancelAllUpdates(); myFreezeSemaphore.up(); ApplicationManager.getApplication().invokeLater(new Runnable() { public void run() { final CompletionPhase phase = CompletionServiceImpl.getCompletionPhase(); if (!(phase instanceof CompletionPhase.BgCalculation) || phase.indicator != CompletionProgressIndicator.this) return; myLookup.setCalculating(false); if (hideAutopopupIfMeaningless()) { return; } if (myCount == 0) { if (!isAutopopupCompletion()) { 
LookupManager.getInstance(getProject()).hideActiveLookup(); final CompletionProgressIndicator current = CompletionServiceImpl.getCompletionService().getCurrentCompletion(); LOG.assertTrue(current == null, current + "!=" + CompletionProgressIndicator.this); handleEmptyLookup(!((CompletionPhase.BgCalculation)phase).modifiersChanged); } } else { CompletionServiceImpl.setCompletionPhase(new CompletionPhase.ItemsCalculated(CompletionProgressIndicator.this, ((CompletionPhase.BgCalculation)phase).focusLookupWhenDone)); updateFocus(); updateLookup(); } } }, myQueue.getModalityState()); } private boolean hideAutopopupIfMeaningless() { if (isAutopopupCompletion() && !myLookup.isSelectionTouched() && !myLookup.isCalculating()) { myLookup.refreshUi(); for (LookupElement item : myLookup.getItems()) { if (!(item.getPrefixMatcher().getPrefix() + myLookup.getAdditionalPrefix()).equals(item.getLookupString())) { return false; } } myLookup.hideLookup(false); LOG.assertTrue(CompletionServiceImpl.getCompletionService().getCurrentCompletion() == null); CompletionServiceImpl.setCompletionPhase(new CompletionPhase.EmptyAutoPopup(this)); return true; } return false; } private void updateFocus() { if (myLookup.isSelectionTouched()) { return; } if (!isAutopopupCompletion()) { myLookup.setFocused(true); return; } for (LookupElement item : myLookup.getItems()) { if ((item.getPrefixMatcher().getPrefix() + myLookup.getAdditionalPrefix()).equals(item.getLookupString())) { myLookup.setFocused(false); return; } } final CompletionPhase phase = CompletionServiceImpl.getCompletionPhase(); myLookup.setFocused(phase instanceof CompletionPhase.ItemsCalculated && ((CompletionPhase.ItemsCalculated)phase).focusLookup); } public boolean fillInCommonPrefix(final boolean explicit) { if (isInsideIdentifier()) { return false; } final Boolean aBoolean = new WriteCommandAction<Boolean>(getProject()) { protected void run(Result<Boolean> result) throws Throwable { if (!explicit) { setMergeCommand(); } try { 
result.setResult(myLookup.fillInCommonPrefix(explicit)); } catch (Exception e) { LOG.error(e); } } }.execute().getResultObject(); return aBoolean.booleanValue(); } public void restorePrefix(final CompletionPhase zombie) { new WriteCommandAction(getProject(), getCompletionCommandName()) { @Override protected void run(Result result) throws Throwable { setMergeCommand(); if (zombie instanceof CompletionPhase.InsertedSingleItem) { ((CompletionPhase.InsertedSingleItem)zombie).restorePrefix.run(); } getLookup().restorePrefix(); } }.execute(); } public Editor getEditor() { return myEditor; } public boolean isRepeatedInvocation(CompletionType completionType, Editor editor) { if (completionType != myParameters.getCompletionType() || editor != myEditor) { return false; } if (isAutopopupCompletion() && !myLookup.mayBeNoticed()) { return false; } return true; } @Override public boolean isAutopopupCompletion() { return myHandler.autopopup; } @NotNull public Project getProject() { return ObjectUtils.assertNotNull(myEditor.getProject()); } public void addWatchedPrefix(int startOffset, ElementPattern<String> restartCondition) { if (isAutopopupCompletion()) { myRestartingPrefixConditions.add(Pair.create(startOffset, restartCondition)); } } public void prefixUpdated() { final CharSequence text = myEditor.getDocument().getCharsSequence(); final int caretOffset = myEditor.getCaretModel().getOffset(); for (Pair<Integer, ElementPattern<String>> pair : myRestartingPrefixConditions) { final String newPrefix = text.subSequence(pair.first, caretOffset).toString(); if (pair.second.accepts(newPrefix)) { scheduleRestart(); myRestartingPrefixConditions.clear(); return; } } hideAutopopupIfMeaningless(); updateFocus(); } public void scheduleRestart() { cancel(); ApplicationManager.getApplication().assertIsDispatchThread(); final CompletionPhase phase = new CompletionPhase.Restarted(this); CompletionServiceImpl.setCompletionPhase(phase); final Project project = getProject(); 
ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { if (phase != CompletionServiceImpl.getCompletionPhase()) { return; } closeAndFinish(false); final CodeCompletionHandlerBase newHandler = new CodeCompletionHandlerBase(myParameters.getCompletionType(), false, isAutopopupCompletion()); final PsiFile psiFileInEditor = PsiUtilBase.getPsiFileInEditor(myEditor, project); try { newHandler.invokeCompletion(project, myEditor, psiFileInEditor, myParameters.getInvocationCount()); } catch (IndexNotReadyException ignored) { } } }); } @Override public String toString() { return "CompletionProgressIndicator[count=" + myCount + ",phase=" + CompletionServiceImpl.getCompletionPhase() + "]"; } protected void handleEmptyLookup(final boolean awaitSecondInvocation) { assert !isAutopopupCompletion(); if (ApplicationManager.getApplication().isUnitTestMode() || !myHandler.invokedExplicitly) { CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion); return; } for (final CompletionContributor contributor : CompletionContributor.forParameters(getParameters())) { final String text = contributor.handleEmptyLookup(getParameters(), getEditor()); if (StringUtil.isNotEmpty(text)) { LightweightHint hint = showErrorHint(getProject(), getEditor(), text); CompletionServiceImpl.setCompletionPhase( awaitSecondInvocation ? 
new CompletionPhase.NoSuggestionsHint(hint, this) : CompletionPhase.NoCompletion); return; } } CompletionServiceImpl.setCompletionPhase(CompletionPhase.NoCompletion); } private static LightweightHint showErrorHint(Project project, Editor editor, String text) { final LightweightHint[] result = {null}; final EditorHintListener listener = new EditorHintListener() { public void hintShown(final Project project, final LightweightHint hint, final int flags) { result[0] = hint; } }; final MessageBusConnection connection = project.getMessageBus().connect(); connection.subscribe(EditorHintListener.TOPIC, listener); assert text != null; HintManager.getInstance().showErrorHint(editor, text); connection.disconnect(); return result[0]; } }
EA-25324 diagnostics
platform/lang-impl/src/com/intellij/codeInsight/completion/CompletionProgressIndicator.java
EA-25324 diagnostics
Java
apache-2.0
5a9b69919927ee076ca0817da3489e43eb88d338
0
safarmer/bazel,whuwxl/bazel,UrbanCompass/bazel,anupcshan/bazel,snnn/bazel,variac/bazel,juhalindfors/bazel-patches,perezd/bazel,mikelalcon/bazel,zhexuany/bazel,Digas29/bazel,whuwxl/bazel,akira-baruah/bazel,werkt/bazel,dropbox/bazel,anupcshan/bazel,cushon/bazel,dropbox/bazel,werkt/bazel,kchodorow/bazel,meteorcloudy/bazel,juhalindfors/bazel-patches,dinowernli/bazel,ulfjack/bazel,ButterflyNetwork/bazel,snnn/bazel,cushon/bazel,katre/bazel,perezd/bazel,spxtr/bazel,aehlig/bazel,Asana/bazel,mrdomino/bazel,meteorcloudy/bazel,damienmg/bazel,whuwxl/bazel,LuminateWireless/bazel,hermione521/bazel,spxtr/bazel,juhalindfors/bazel-patches,dropbox/bazel,safarmer/bazel,juhalindfors/bazel-patches,juhalindfors/bazel-patches,bazelbuild/bazel,Asana/bazel,rohitsaboo/bazel,kamalmarhubi/bazel,mikelikespie/bazel,spxtr/bazel,perezd/bazel,davidzchen/bazel,davidzchen/bazel,kchodorow/bazel-1,hhclam/bazel,mbrukman/bazel,dslomov/bazel-windows,kamalmarhubi/bazel,dslomov/bazel-windows,kamalmarhubi/bazel,snnn/bazel,rohitsaboo/bazel,meteorcloudy/bazel,anupcshan/bazel,kchodorow/bazel-1,katre/bazel,iamthearm/bazel,rohitsaboo/bazel,perezd/bazel,snnn/bazel,dropbox/bazel,spxtr/bazel,katre/bazel,perezd/bazel,kchodorow/bazel-1,mikelalcon/bazel,ulfjack/bazel,dslomov/bazel,katre/bazel,variac/bazel,zhexuany/bazel,abergmeier-dsfishlabs/bazel,anupcshan/bazel,hermione521/bazel,mrdomino/bazel,hhclam/bazel,Digas29/bazel,ButterflyNetwork/bazel,safarmer/bazel,dropbox/bazel,ButterflyNetwork/bazel,werkt/bazel,werkt/bazel,mrdomino/bazel,twitter-forks/bazel,abergmeier-dsfishlabs/bazel,LuminateWireless/bazel,anupcshan/bazel,akira-baruah/bazel,ButterflyNetwork/bazel,dslomov/bazel-windows,ButterflyNetwork/bazel,werkt/bazel,dinowernli/bazel,akira-baruah/bazel,cushon/bazel,hermione521/bazel,UrbanCompass/bazel,davidzchen/bazel,safarmer/bazel,mikelikespie/bazel,twitter-forks/bazel,UrbanCompass/bazel,kchodorow/bazel-1,mrdomino/bazel,LuminateWireless/bazel,twitter-forks/bazel,davidzchen/bazel,dslomov/bazel-windows,meteorcloudy/baze
l,twitter-forks/bazel,hhclam/bazel,mikelalcon/bazel,mikelikespie/bazel,Digas29/bazel,ulfjack/bazel,damienmg/bazel,spxtr/bazel,mbrukman/bazel,Asana/bazel,LuminateWireless/bazel,dinowernli/bazel,twitter-forks/bazel,Digas29/bazel,Asana/bazel,mikelikespie/bazel,perezd/bazel,kchodorow/bazel,aehlig/bazel,juhalindfors/bazel-patches,anupcshan/bazel,variac/bazel,cushon/bazel,dinowernli/bazel,UrbanCompass/bazel,Asana/bazel,twitter-forks/bazel,iamthearm/bazel,UrbanCompass/bazel,dinowernli/bazel,abergmeier-dsfishlabs/bazel,werkt/bazel,mikelalcon/bazel,spxtr/bazel,cushon/bazel,zhexuany/bazel,kchodorow/bazel,iamthearm/bazel,hermione521/bazel,variac/bazel,variac/bazel,zhexuany/bazel,zhexuany/bazel,juhalindfors/bazel-patches,LuminateWireless/bazel,hermione521/bazel,LuminateWireless/bazel,rohitsaboo/bazel,iamthearm/bazel,aehlig/bazel,kchodorow/bazel,damienmg/bazel,bazelbuild/bazel,dslomov/bazel,zhexuany/bazel,mbrukman/bazel,dslomov/bazel-windows,damienmg/bazel,dslomov/bazel-windows,mrdomino/bazel,damienmg/bazel,dslomov/bazel,abergmeier-dsfishlabs/bazel,ButterflyNetwork/bazel,dslomov/bazel,aehlig/bazel,bazelbuild/bazel,mrdomino/bazel,variac/bazel,dslomov/bazel,snnn/bazel,UrbanCompass/bazel,abergmeier-dsfishlabs/bazel,damienmg/bazel,dslomov/bazel,perezd/bazel,Digas29/bazel,whuwxl/bazel,mbrukman/bazel,akira-baruah/bazel,kamalmarhubi/bazel,ulfjack/bazel,kchodorow/bazel,dinowernli/bazel,abergmeier-dsfishlabs/bazel,whuwxl/bazel,hhclam/bazel,meteorcloudy/bazel,safarmer/bazel,dropbox/bazel,bazelbuild/bazel,kamalmarhubi/bazel,kchodorow/bazel-1,katre/bazel,hhclam/bazel,rohitsaboo/bazel,Asana/bazel,spxtr/bazel,ulfjack/bazel,kchodorow/bazel,aehlig/bazel,mbrukman/bazel,ulfjack/bazel,rohitsaboo/bazel,Digas29/bazel,mikelalcon/bazel,kchodorow/bazel-1,snnn/bazel,hermione521/bazel,hhclam/bazel,kamalmarhubi/bazel,davidzchen/bazel,meteorcloudy/bazel,whuwxl/bazel,aehlig/bazel,meteorcloudy/bazel,snnn/bazel,davidzchen/bazel,ulfjack/bazel,bazelbuild/bazel,iamthearm/bazel,dslomov/bazel,akira-baruah/bazel,ka
tre/bazel,Asana/bazel,damienmg/bazel,bazelbuild/bazel,cushon/bazel,davidzchen/bazel,aehlig/bazel,safarmer/bazel,kchodorow/bazel,akira-baruah/bazel,mikelikespie/bazel,twitter-forks/bazel,mikelikespie/bazel,mbrukman/bazel,iamthearm/bazel,mikelalcon/bazel,variac/bazel
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import com.google.common.base.Preconditions; import com.google.common.cache.Cache; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.PackageIdentifier; import com.google.devtools.build.lib.cmdline.PackageIdentifier.RepositoryName; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.Location; import com.google.devtools.build.lib.events.StoredEventHandler; import com.google.devtools.build.lib.packages.BuildFileContainsErrorsException; import com.google.devtools.build.lib.packages.BuildFileNotFoundException; import com.google.devtools.build.lib.packages.CachingPackageLocator; import com.google.devtools.build.lib.packages.InvalidPackageNameException; import com.google.devtools.build.lib.packages.NoSuchPackageException; import com.google.devtools.build.lib.packages.Package; import com.google.devtools.build.lib.packages.Package.LegacyBuilder; import com.google.devtools.build.lib.packages.PackageFactory; import 
com.google.devtools.build.lib.packages.PackageFactory.Globber; import com.google.devtools.build.lib.packages.Preprocessor; import com.google.devtools.build.lib.packages.Preprocessor.Result; import com.google.devtools.build.lib.packages.RuleVisibility; import com.google.devtools.build.lib.packages.Target; import com.google.devtools.build.lib.profiler.Profiler; import com.google.devtools.build.lib.profiler.ProfilerTask; import com.google.devtools.build.lib.skyframe.ASTFileLookupValue.ASTLookupInputException; import com.google.devtools.build.lib.skyframe.GlobValue.InvalidGlobPatternException; import com.google.devtools.build.lib.skyframe.SkylarkImportLookupFunction.SkylarkImportFailedException; import com.google.devtools.build.lib.syntax.BuildFileAST; import com.google.devtools.build.lib.syntax.Environment.Extension; import com.google.devtools.build.lib.syntax.EvalException; import com.google.devtools.build.lib.syntax.ParserInputSource; import com.google.devtools.build.lib.syntax.Statement; import com.google.devtools.build.lib.util.Pair; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionException.Transience; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import com.google.devtools.build.skyframe.ValueOrException3; import com.google.devtools.build.skyframe.ValueOrException4; import com.google.devtools.build.skyframe.ValueOrExceptionUtils; import java.io.IOException; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import 
javax.annotation.Nullable;

/**
 * A SkyFunction for {@link PackageValue}s. Given a {@link PackageIdentifier}, locates and loads
 * the corresponding package via the legacy {@link PackageFactory} machinery, registering the
 * implicit Skyframe dependencies (package lookups, subinclude files, globs) after the fact.
 */
public class PackageFunction implements SkyFunction {

  private final PackageFactory packageFactory;
  private final CachingPackageLocator packageLocator;
  // Caches partially-built packages across Skyframe restarts of this function.
  private final Cache<PackageIdentifier, Package.LegacyBuilder> packageFunctionCache;
  private final Cache<PackageIdentifier, Preprocessor.Result> preprocessCache;
  private final AtomicBoolean showLoadingProgress;
  private final AtomicInteger numPackagesLoaded;
  private final Profiler profiler = Profiler.instance();
  private final PathFragment preludePath;

  // Not final only for testing.
  @Nullable private SkylarkImportLookupFunction skylarkImportLookupFunctionForInlining;

  static final PathFragment DEFAULTS_PACKAGE_NAME = new PathFragment("tools/defaults");

  /**
   * Creates a PackageFunction.
   *
   * @param packageFactory the legacy package loading machinery; may be null in tests
   * @param pkgLocator used to locate BUILD files
   * @param showLoadingProgress whether to emit "Loading package" progress messages
   * @param packageFunctionCache cache of partially-built packages, keyed by package id
   * @param preprocessCache cache of preprocessor results, keyed by package id
   * @param numPackagesLoaded counter incremented as packages finish loading
   * @param skylarkImportLookupFunctionForInlining if non-null, Skylark imports are computed by
   *     calling this function inline instead of declaring Skyframe dependencies
   */
  public PackageFunction(
      PackageFactory packageFactory,
      CachingPackageLocator pkgLocator,
      AtomicBoolean showLoadingProgress,
      Cache<PackageIdentifier, LegacyBuilder> packageFunctionCache,
      Cache<PackageIdentifier, Result> preprocessCache,
      AtomicInteger numPackagesLoaded,
      @Nullable SkylarkImportLookupFunction skylarkImportLookupFunctionForInlining) {
    this.skylarkImportLookupFunctionForInlining = skylarkImportLookupFunctionForInlining;
    // Can be null in tests.
    this.preludePath = packageFactory == null
        ? null
        : packageFactory.getRuleClassProvider().getPreludePath();
    this.packageFactory = packageFactory;
    this.packageLocator = pkgLocator;
    this.showLoadingProgress = showLoadingProgress;
    this.packageFunctionCache = packageFunctionCache;
    this.preprocessCache = preprocessCache;
    this.numPackagesLoaded = numPackagesLoaded;
  }

  /** Test-only override of the inlined Skylark import lookup function. */
  public void setSkylarkImportLookupFunctionForInliningForTesting(
      SkylarkImportLookupFunction skylarkImportLookupFunctionForInlining) {
    this.skylarkImportLookupFunctionForInlining = skylarkImportLookupFunctionForInlining;
  }

  /**
   * Throws an {@link InternalInconsistentFilesystemException} if the package was NOT already in
   * error: legacy loading succeeded earlier, so seeing a Skyframe error now for the same work
   * means the filesystem changed under us. If the package was already in error, the exception is
   * expected and ignored.
   */
  private static void maybeThrowFilesystemInconsistency(PackageIdentifier packageIdentifier,
      Exception skyframeException, boolean packageWasInError)
          throws InternalInconsistentFilesystemException {
    if (!packageWasInError) {
      throw new InternalInconsistentFilesystemException(packageIdentifier,
          "Encountered error '" + skyframeException.getMessage()
          + "' but didn't encounter it when doing the same thing "
          + "earlier in the build");
    }
  }

  /**
   * Marks the given dependencies, and returns those already present. Ignores any exception
   * thrown while building the dependency, except for filesystem inconsistencies.
   *
   * <p>We need to mark dependencies implicitly used by the legacy package loading code, but we
   * don't care about any skyframe errors since the package knows whether it's in error or not.
   *
   * @return a pair of (package-fragment -> lookup value map, whether the package should now be
   *     considered in error)
   */
  private static Pair<? extends Map<PathFragment, PackageLookupValue>, Boolean>
      getPackageLookupDepsAndPropagateInconsistentFilesystemExceptions(
          PackageIdentifier packageIdentifier,
          Iterable<SkyKey> depKeys,
          Environment env,
          boolean packageWasInError)
              throws InternalInconsistentFilesystemException {
    Preconditions.checkState(
        Iterables.all(depKeys, SkyFunctions.isSkyFunction(SkyFunctions.PACKAGE_LOOKUP)), depKeys);
    boolean packageShouldBeInError = packageWasInError;
    ImmutableMap.Builder<PathFragment, PackageLookupValue> builder = ImmutableMap.builder();
    for (Map.Entry<SkyKey, ValueOrException3<BuildFileNotFoundException,
        InconsistentFilesystemException, FileSymlinkException>> entry :
            env.getValuesOrThrow(depKeys, BuildFileNotFoundException.class,
                InconsistentFilesystemException.class,
                FileSymlinkException.class).entrySet()) {
      PathFragment pkgName = ((PackageIdentifier) entry.getKey().argument()).getPackageFragment();
      try {
        PackageLookupValue value = (PackageLookupValue) entry.getValue().get();
        if (value != null) {
          builder.put(pkgName, value);
        }
      } catch (BuildFileNotFoundException e) {
        maybeThrowFilesystemInconsistency(packageIdentifier, e, packageWasInError);
      } catch (InconsistentFilesystemException e) {
        throw new InternalInconsistentFilesystemException(packageIdentifier, e);
      } catch (FileSymlinkException e) {
        // Legacy doesn't detect symlink cycles.
        packageShouldBeInError = true;
      }
    }
    return Pair.of(builder.build(), packageShouldBeInError);
  }

  /**
   * Declares FILE dependencies after the fact, ignoring errors legacy loading already saw.
   *
   * @return whether the package should now be considered in error
   */
  private static boolean markFileDepsAndPropagateInconsistentFilesystemExceptions(
      PackageIdentifier packageIdentifier, Iterable<SkyKey> depKeys, Environment env,
      boolean packageWasInError) throws InternalInconsistentFilesystemException {
    Preconditions.checkState(
        Iterables.all(depKeys, SkyFunctions.isSkyFunction(SkyFunctions.FILE)), depKeys);
    boolean packageShouldBeInError = packageWasInError;
    for (Map.Entry<SkyKey, ValueOrException3<IOException, FileSymlinkException,
        InconsistentFilesystemException>> entry :
            env.getValuesOrThrow(depKeys, IOException.class, FileSymlinkException.class,
                InconsistentFilesystemException.class).entrySet()) {
      try {
        entry.getValue().get();
      } catch (IOException e) {
        maybeThrowFilesystemInconsistency(packageIdentifier, e, packageWasInError);
      } catch (FileSymlinkException e) {
        // Legacy doesn't detect symlink cycles.
        packageShouldBeInError = true;
      } catch (InconsistentFilesystemException e) {
        throw new InternalInconsistentFilesystemException(packageIdentifier, e);
      }
    }
    return packageShouldBeInError;
  }

  /**
   * Declares GLOB dependencies after the fact, ignoring errors legacy loading already saw.
   *
   * @return whether the package should now be considered in error
   */
  private static boolean markGlobDepsAndPropagateInconsistentFilesystemExceptions(
      PackageIdentifier packageIdentifier, Iterable<SkyKey> depKeys, Environment env,
      boolean packageWasInError) throws InternalInconsistentFilesystemException {
    Preconditions.checkState(
        Iterables.all(depKeys, SkyFunctions.isSkyFunction(SkyFunctions.GLOB)), depKeys);
    boolean packageShouldBeInError = packageWasInError;
    for (Map.Entry<SkyKey, ValueOrException4<IOException, BuildFileNotFoundException,
        FileSymlinkException, InconsistentFilesystemException>> entry :
            env.getValuesOrThrow(depKeys, IOException.class, BuildFileNotFoundException.class,
                FileSymlinkException.class, InconsistentFilesystemException.class).entrySet()) {
      try {
        entry.getValue().get();
      } catch (IOException | BuildFileNotFoundException e) {
        maybeThrowFilesystemInconsistency(packageIdentifier, e, packageWasInError);
      } catch (FileSymlinkException e) {
        // Legacy doesn't detect symlink cycles.
        packageShouldBeInError = true;
      } catch (InconsistentFilesystemException e) {
        throw new InternalInconsistentFilesystemException(packageIdentifier, e);
      }
    }
    return packageShouldBeInError;
  }

  /**
   * Marks dependencies implicitly used by legacy package loading code, after the fact. Note that
   * the given package might already be in error.
   *
   * <p>Any skyframe exceptions encountered here are ignored, as similar errors should have
   * already been encountered by legacy package loading (if not, then the filesystem is
   * inconsistent).
   *
   * @return whether the package should be considered in error
   */
  private static boolean markDependenciesAndPropagateInconsistentFilesystemExceptions(
      Environment env, Collection<Pair<String, Boolean>> globPatterns,
      Map<Label, Path> subincludes, PackageIdentifier packageIdentifier, boolean containsErrors)
          throws InternalInconsistentFilesystemException {
    boolean packageShouldBeInError = containsErrors;

    // TODO(bazel-team): This means that many packages will have to be preprocessed twice. Ouch!
    // We need a better continuation mechanism to avoid repeating work. [skyframe-loading]

    // TODO(bazel-team): It would be preferable to perform I/O from the package preprocessor via
    // Skyframe rather than add (potentially incomplete) dependencies after the fact.
    // [skyframe-loading]
    Set<SkyKey> subincludePackageLookupDepKeys = Sets.newHashSet();
    for (Label label : subincludes.keySet()) {
      // Declare a dependency on the package lookup for the package giving access to the label.
      subincludePackageLookupDepKeys.add(PackageLookupValue.key(label.getPackageIdentifier()));
    }
    Pair<? extends Map<PathFragment, PackageLookupValue>, Boolean> subincludePackageLookupResult =
        getPackageLookupDepsAndPropagateInconsistentFilesystemExceptions(
            packageIdentifier, subincludePackageLookupDepKeys, env, containsErrors);
    Map<PathFragment, PackageLookupValue> subincludePackageLookupDeps =
        subincludePackageLookupResult.getFirst();
    packageShouldBeInError |= subincludePackageLookupResult.getSecond();
    List<SkyKey> subincludeFileDepKeys = Lists.newArrayList();
    for (Entry<Label, Path> subincludeEntry : subincludes.entrySet()) {
      // Ideally, we would have a direct dependency on the target with the given label, but then
      // subincluding a file from the same package will cause a dependency cycle, since targets
      // depend on their containing packages.
      Label label = subincludeEntry.getKey();
      PackageLookupValue subincludePackageLookupValue =
          subincludePackageLookupDeps.get(label.getPackageFragment());
      if (subincludePackageLookupValue != null) {
        // Declare a dependency on the actual file that was subincluded.
        Path subincludeFilePath = subincludeEntry.getValue();
        if (subincludeFilePath != null && !subincludePackageLookupValue.packageExists()) {
          // Legacy blaze puts a non-null path only when the package does indeed exist.
          throw new InternalInconsistentFilesystemException(
              packageIdentifier,
              String.format(
                  "Unexpected package in %s. Was it modified during the build?",
                  subincludeFilePath));
        }
        if (subincludePackageLookupValue.packageExists()) {
          // Sanity check for consistency of Skyframe and legacy blaze.
          Path subincludeFilePathSkyframe =
              subincludePackageLookupValue.getRoot().getRelative(label.toPathFragment());
          if (subincludeFilePath != null
              && !subincludeFilePathSkyframe.equals(subincludeFilePath)) {
            throw new InternalInconsistentFilesystemException(
                packageIdentifier,
                String.format(
                    "Inconsistent package location for %s: '%s' vs '%s'. "
                        + "Was the source tree modified during the build?",
                    label.getPackageFragment(),
                    subincludeFilePathSkyframe,
                    subincludeFilePath));
          }
          // The actual file may be under a different package root than the package being
          // constructed.
          SkyKey subincludeSkyKey =
              FileValue.key(
                  RootedPath.toRootedPath(
                      subincludePackageLookupValue.getRoot(),
                      label.getPackageFragment().getRelative(label.getName())));
          subincludeFileDepKeys.add(subincludeSkyKey);
        }
      }
    }
    packageShouldBeInError |= markFileDepsAndPropagateInconsistentFilesystemExceptions(
        packageIdentifier, subincludeFileDepKeys, env, containsErrors);

    // TODO(bazel-team): In the long term, we want to actually resolve the glob patterns within
    // Skyframe. For now, just logging the glob requests provides correct incrementality and
    // adequate performance.
    List<SkyKey> globDepKeys = Lists.newArrayList();
    for (Pair<String, Boolean> globPattern : globPatterns) {
      String pattern = globPattern.getFirst();
      boolean excludeDirs = globPattern.getSecond();
      SkyKey globSkyKey;
      try {
        globSkyKey = GlobValue.key(packageIdentifier, pattern, excludeDirs,
            PathFragment.EMPTY_FRAGMENT);
      } catch (InvalidGlobPatternException e) {
        // Globs that make it to pkg.getGlobPatterns() should already be filtered for errors.
        throw new IllegalStateException(e);
      }
      globDepKeys.add(globSkyKey);
    }
    packageShouldBeInError |= markGlobDepsAndPropagateInconsistentFilesystemExceptions(
        packageIdentifier, globDepKeys, env, containsErrors);
    return packageShouldBeInError;
  }

  /**
   * Adds a dependency on the WORKSPACE file, representing it as a special type of package.
   *
   * @return the //external package as a {@link PackageValue}, or null if the workspace value is
   *     not yet available (Skyframe restart)
   * @throws PackageFunctionException if there is an error computing the workspace file or adding
   *     its rules to the //external package.
   */
  private SkyValue getExternalPackage(Environment env, Path packageLookupPath)
      throws PackageFunctionException {
    RootedPath workspacePath = RootedPath.toRootedPath(
        packageLookupPath, new PathFragment("WORKSPACE"));
    SkyKey workspaceKey = PackageValue.workspaceKey(workspacePath);
    PackageValue workspace = null;
    try {
      workspace = (PackageValue) env.getValueOrThrow(workspaceKey, IOException.class,
          FileSymlinkException.class, InconsistentFilesystemException.class,
          EvalException.class);
    } catch (IOException | FileSymlinkException | InconsistentFilesystemException
        | EvalException e) {
      throw new PackageFunctionException(new BadWorkspaceFileException(e.getMessage()),
          Transience.PERSISTENT);
    }
    if (workspace == null) {
      return null;
    }

    Package pkg = workspace.getPackage();
    // Replay events stored on the package so the caller's reporter sees them.
    Event.replayEventsOn(env.getListener(), pkg.getEvents());
    return new PackageValue(pkg);
  }

  /**
   * Computes the {@link PackageValue} for the package named by {@code key}: resolves the package
   * location, loads the BUILD file through legacy loading, registers implicit dependencies, and
   * returns the finished package. Returns null (per the SkyFunction contract) whenever a needed
   * dependency value is missing and Skyframe must restart this computation.
   */
  @Override
  public SkyValue compute(SkyKey key, Environment env) throws PackageFunctionException,
      InterruptedException {
    PackageIdentifier packageId = (PackageIdentifier) key.argument();
    PathFragment packageNameFragment = packageId.getPackageFragment();

    SkyKey packageLookupKey = PackageLookupValue.key(packageId);
    PackageLookupValue packageLookupValue;
    try {
      packageLookupValue = (PackageLookupValue) env.getValueOrThrow(packageLookupKey,
          BuildFileNotFoundException.class, InconsistentFilesystemException.class);
    } catch (BuildFileNotFoundException e) {
      throw new PackageFunctionException(e, Transience.PERSISTENT);
    } catch (InconsistentFilesystemException e) {
      // This error is not transient from the perspective of the PackageFunction.
      throw new PackageFunctionException(
          new InternalInconsistentFilesystemException(packageId, e), Transience.PERSISTENT);
    }
    if (packageLookupValue == null) {
      return null;
    }

    if (!packageLookupValue.packageExists()) {
      switch (packageLookupValue.getErrorReason()) {
        case NO_BUILD_FILE:
        case DELETED_PACKAGE:
        case NO_EXTERNAL_PACKAGE:
          throw new PackageFunctionException(new BuildFileNotFoundException(packageId,
              packageLookupValue.getErrorMsg()), Transience.PERSISTENT);
        case INVALID_PACKAGE_NAME:
          throw new PackageFunctionException(new InvalidPackageNameException(packageId,
              packageLookupValue.getErrorMsg()), Transience.PERSISTENT);
        default:
          // We should never get here.
          throw new IllegalStateException();
      }
    }

    // The //external package is handled specially via the WORKSPACE file.
    if (packageId.equals(Package.EXTERNAL_PACKAGE_IDENTIFIER)) {
      return getExternalPackage(env, packageLookupValue.getRoot());
    }
    SkyKey externalPackageKey = PackageValue.key(Package.EXTERNAL_PACKAGE_IDENTIFIER);
    PackageValue externalPackage = (PackageValue) env.getValue(externalPackageKey);
    if (externalPackage == null) {
      return null;
    }
    Package externalPkg = externalPackage.getPackage();
    if (externalPkg.containsErrors()) {
      throw new PackageFunctionException(
          new BuildFileContainsErrorsException(Package.EXTERNAL_PACKAGE_IDENTIFIER),
          Transience.PERSISTENT);
    }

    // The faux tools/defaults package has generated contents instead of a real BUILD file.
    boolean isDefaultsPackage = packageNameFragment.equals(DEFAULTS_PACKAGE_NAME)
        && packageId.getRepository().isDefault();

    PathFragment buildFileFragment = packageNameFragment.getChild("BUILD");
    RootedPath buildFileRootedPath = RootedPath.toRootedPath(packageLookupValue.getRoot(),
        buildFileFragment);
    FileValue buildFileValue = null;
    if (!isDefaultsPackage) {
      try {
        buildFileValue = (FileValue) env.getValueOrThrow(FileValue.key(buildFileRootedPath),
            IOException.class, FileSymlinkException.class,
            InconsistentFilesystemException.class);
      } catch (IOException | FileSymlinkException | InconsistentFilesystemException e) {
        throw new IllegalStateException("Package lookup succeeded but encountered error when "
            + "getting FileValue for BUILD file directly.", e);
      }
      if (buildFileValue == null) {
        return null;
      }
      Preconditions.checkState(buildFileValue.exists(),
          "Package lookup succeeded but BUILD file doesn't exist");
    }
    Path buildFilePath = buildFileRootedPath.asPath();

    String replacementContents = null;
    if (isDefaultsPackage) {
      replacementContents = PrecomputedValue.DEFAULTS_PACKAGE_CONTENTS.get(env);
      if (replacementContents == null) {
        return null;
      }
    }

    RuleVisibility defaultVisibility = PrecomputedValue.DEFAULT_VISIBILITY.get(env);
    if (defaultVisibility == null) {
      return null;
    }

    // Load the prelude file; its statements are prepended to every BUILD file.
    ASTFileLookupValue astLookupValue = null;
    SkyKey astLookupKey = ASTFileLookupValue.key(
        PackageIdentifier.createInDefaultRepo(preludePath));
    try {
      astLookupValue = (ASTFileLookupValue) env.getValueOrThrow(astLookupKey,
          ErrorReadingSkylarkExtensionException.class, InconsistentFilesystemException.class);
    } catch (ErrorReadingSkylarkExtensionException | InconsistentFilesystemException e) {
      throw new PackageFunctionException(new BadPreludeFileException(packageId, e.getMessage()),
          Transience.PERSISTENT);
    }
    if (astLookupValue == null) {
      return null;
    }
    List<Statement> preludeStatements = astLookupValue.getAST() == null
        ? ImmutableList.<Statement>of() : astLookupValue.getAST().getStatements();

    Package.LegacyBuilder legacyPkgBuilder =
        loadPackage(
            externalPkg,
            replacementContents,
            packageId,
            buildFilePath,
            buildFileValue,
            buildFileFragment,
            defaultVisibility,
            preludeStatements,
            env);
    if (legacyPkgBuilder == null) {
      return null;
    }
    legacyPkgBuilder.buildPartial();
    try {
      // Targets and subincludes must not cross subpackage boundaries; check and prune.
      handleLabelsCrossingSubpackagesAndPropagateInconsistentFilesystemExceptions(
          packageLookupValue.getRoot(), packageId, legacyPkgBuilder, env);
    } catch (InternalInconsistentFilesystemException e) {
      packageFunctionCache.invalidate(packageId);
      throw new PackageFunctionException(e,
          e.isTransient() ? Transience.TRANSIENT : Transience.PERSISTENT);
    }
    if (env.valuesMissing()) {
      // The package we just loaded will be in the {@code packageFunctionCache} next when this
      // SkyFunction is called again.
      return null;
    }

    Collection<Pair<String, Boolean>> globPatterns = legacyPkgBuilder.getGlobPatterns();
    Map<Label, Path> subincludes = legacyPkgBuilder.getSubincludes();
    Event.replayEventsOn(env.getListener(), legacyPkgBuilder.getEvents());
    boolean packageShouldBeConsideredInError;
    try {
      packageShouldBeConsideredInError =
          markDependenciesAndPropagateInconsistentFilesystemExceptions(
              env, globPatterns, subincludes, packageId, legacyPkgBuilder.containsErrors());
    } catch (InternalInconsistentFilesystemException e) {
      packageFunctionCache.invalidate(packageId);
      throw new PackageFunctionException(e,
          e.isTransient() ? Transience.TRANSIENT : Transience.PERSISTENT);
    }
    if (env.valuesMissing()) {
      return null;
    }

    if (packageShouldBeConsideredInError) {
      legacyPkgBuilder.setContainsErrors();
    }
    Package pkg = legacyPkgBuilder.finishBuild();

    // We know this SkyFunction will not be called again, so we can remove the cache entry.
    packageFunctionCache.invalidate(packageId);

    return new PackageValue(pkg);
  }

  // TODO(bazel-team): this should take the AST so we don't parse the file twice.
@Nullable private SkylarkImportResult discoverSkylarkImports( Path buildFilePath, PathFragment buildFileFragment, PackageIdentifier packageId, Environment env, ParserInputSource inputSource, List<Statement> preludeStatements) throws PackageFunctionException, InterruptedException { StoredEventHandler eventHandler = new StoredEventHandler(); BuildFileAST buildFileAST = BuildFileAST.parseBuildFile( inputSource, preludeStatements, eventHandler, /* parse python */ false); SkylarkImportResult importResult; if (eventHandler.hasErrors()) { importResult = new SkylarkImportResult( ImmutableMap.<PathFragment, Extension>of(), ImmutableList.<Label>of()); } else { importResult = fetchImportsFromBuildFile(buildFilePath, buildFileFragment, packageId, buildFileAST, env); } return importResult; } static SkyKey getImportKey( Map.Entry<Location, PathFragment> entry, PathFragment preludePath, PathFragment buildFileFragment, PackageIdentifier packageId) throws ASTLookupInputException { PathFragment importFile = entry.getValue(); // HACK: The prelude sometimes contains load() statements, which need to be resolved // relative to the prelude file. However, we don't have a good way to tell "this should come // from the main repository" in a load() statement, and we don't have a good way to tell if // a load() statement comes from the prelude, since we just prepend those statements before // the actual BUILD file. So we use this evil .endsWith() statement to figure it out. RepositoryName repository = entry.getKey().getPath().endsWith(preludePath) ? 
PackageIdentifier.DEFAULT_REPOSITORY_NAME : packageId.getRepository(); return SkylarkImportLookupValue.key(repository, buildFileFragment, importFile); } private static SkyKey getImportKeyAndMaybeThrowException( Map.Entry<Location, PathFragment> entry, PathFragment preludePath, PathFragment buildFileFragment, PackageIdentifier packageId) throws PackageFunctionException { try { return getImportKey(entry, preludePath, buildFileFragment, packageId); } catch (ASTLookupInputException e) { // The load syntax is bad in the BUILD file so BuildFileContainsErrorsException is OK. throw new PackageFunctionException( new BuildFileContainsErrorsException(packageId, e.getMessage()), Transience.PERSISTENT); } } /** * Fetch the skylark loads for this BUILD file. If any of them haven't been computed yet, * returns null. */ @Nullable private SkylarkImportResult fetchImportsFromBuildFile( Path buildFilePath, PathFragment buildFileFragment, PackageIdentifier packageId, BuildFileAST buildFileAST, Environment env) throws PackageFunctionException, InterruptedException { ImmutableMap<Location, PathFragment> imports = buildFileAST.getImports(); Map<PathFragment, Extension> importMap = Maps.newHashMapWithExpectedSize(imports.size()); ImmutableList.Builder<SkylarkFileDependency> fileDependencies = ImmutableList.builder(); Map< SkyKey, ValueOrException4< SkylarkImportFailedException, InconsistentFilesystemException, ASTLookupInputException, BuildFileNotFoundException>> skylarkImportMap; Map<SkyKey, PathFragment> skylarkImports = Maps.newHashMapWithExpectedSize(imports.size()); if (skylarkImportLookupFunctionForInlining != null) { skylarkImportMap = Maps.newHashMapWithExpectedSize(imports.size()); for (Map.Entry<Location, PathFragment> entry : imports.entrySet()) { SkyKey importKey = getImportKeyAndMaybeThrowException(entry, preludePath, buildFileFragment, packageId); skylarkImports.put(importKey, entry.getValue()); ValueOrException4< SkylarkImportFailedException, 
InconsistentFilesystemException, ASTLookupInputException, BuildFileNotFoundException> lookupResult; try { SkyValue value = skylarkImportLookupFunctionForInlining.computeWithInlineCalls(importKey, env); if (value == null) { Preconditions.checkState(env.valuesMissing(), importKey); // Don't give up on computing. This is against the Skyframe contract, but we don't want // to pay the price of serializing all these calls, since they are fundamentally // independent. lookupResult = ValueOrExceptionUtils.ofNullValue(); } else { lookupResult = ValueOrExceptionUtils.ofValue(value); } } catch (SkyFunctionException e) { Exception cause = e.getCause(); if (cause instanceof SkylarkImportFailedException) { lookupResult = ValueOrExceptionUtils.ofExn1((SkylarkImportFailedException) cause); } else if (cause instanceof InconsistentFilesystemException) { lookupResult = ValueOrExceptionUtils.ofExn2((InconsistentFilesystemException) cause); } else if (cause instanceof ASTLookupInputException) { lookupResult = ValueOrExceptionUtils.ofExn3((ASTLookupInputException) cause); } else if (cause instanceof BuildFileNotFoundException) { lookupResult = ValueOrExceptionUtils.ofExn4((BuildFileNotFoundException) cause); } else { throw new IllegalStateException("Unexpected type for " + importKey, e); } } skylarkImportMap.put(importKey, lookupResult); } } else { for (Map.Entry<Location, PathFragment> entry : imports.entrySet()) { skylarkImports.put( getImportKeyAndMaybeThrowException(entry, preludePath, buildFileFragment, packageId), entry.getValue()); } skylarkImportMap = env.getValuesOrThrow( skylarkImports.keySet(), SkylarkImportFailedException.class, InconsistentFilesystemException.class, ASTLookupInputException.class, BuildFileNotFoundException.class); } for (Map.Entry< SkyKey, ValueOrException4< SkylarkImportFailedException, InconsistentFilesystemException, ASTLookupInputException, BuildFileNotFoundException>> entry : skylarkImportMap.entrySet()) { SkylarkImportLookupValue importLookupValue; 
try { importLookupValue = (SkylarkImportLookupValue) entry.getValue().get(); } catch (SkylarkImportFailedException e) { env.getListener().handle(Event.error(Location.fromFile(buildFilePath), e.getMessage())); throw new PackageFunctionException( new BuildFileContainsErrorsException(packageId, e.getMessage()), Transience.PERSISTENT); } catch (InconsistentFilesystemException e) { throw new PackageFunctionException( new InternalInconsistentFilesystemException(packageId, e), Transience.PERSISTENT); } catch (ASTLookupInputException e) { // The load syntax is bad in the BUILD file so BuildFileContainsErrorsException is OK. throw new PackageFunctionException( new BuildFileContainsErrorsException(packageId, e.getMessage()), Transience.PERSISTENT); } catch (BuildFileNotFoundException e) { throw new PackageFunctionException(e, Transience.PERSISTENT); } if (importLookupValue == null) { Preconditions.checkState(env.valuesMissing(), entry); } else { importMap.put( skylarkImports.get(entry.getKey()), importLookupValue.getEnvironmentExtension()); fileDependencies.add(importLookupValue.getDependency()); } } if (env.valuesMissing()) { // There are unavailable Skylark dependencies. 
return null; } return new SkylarkImportResult(importMap, transitiveClosureOfLabels(fileDependencies.build())); } private ImmutableList<Label> transitiveClosureOfLabels( ImmutableList<SkylarkFileDependency> immediateDeps) { Set<Label> transitiveClosure = Sets.newHashSet(); transitiveClosureOfLabels(immediateDeps, transitiveClosure); return ImmutableList.copyOf(transitiveClosure); } private void transitiveClosureOfLabels( ImmutableList<SkylarkFileDependency> immediateDeps, Set<Label> transitiveClosure) { for (SkylarkFileDependency dep : immediateDeps) { if (transitiveClosure.add(dep.getLabel())) { transitiveClosureOfLabels(dep.getDependencies(), transitiveClosure); } } } @Nullable @Override public String extractTag(SkyKey skyKey) { return null; } private static void handleLabelsCrossingSubpackagesAndPropagateInconsistentFilesystemExceptions( Path pkgRoot, PackageIdentifier pkgId, Package.LegacyBuilder pkgBuilder, Environment env) throws InternalInconsistentFilesystemException { Set<SkyKey> containingPkgLookupKeys = Sets.newHashSet(); Map<Target, SkyKey> targetToKey = new HashMap<>(); for (Target target : pkgBuilder.getTargets()) { PathFragment dir = target.getLabel().toPathFragment().getParentDirectory(); PackageIdentifier dirId = PackageIdentifier.create(pkgId.getRepository(), dir); if (dir.equals(pkgId.getPackageFragment())) { continue; } SkyKey key = ContainingPackageLookupValue.key(dirId); targetToKey.put(target, key); containingPkgLookupKeys.add(key); } Map<Label, SkyKey> subincludeToKey = new HashMap<>(); for (Label subincludeLabel : pkgBuilder.getSubincludeLabels()) { PathFragment dir = subincludeLabel.toPathFragment().getParentDirectory(); PackageIdentifier dirId = PackageIdentifier.create(pkgId.getRepository(), dir); if (dir.equals(pkgId.getPackageFragment())) { continue; } SkyKey key = ContainingPackageLookupValue.key(dirId); subincludeToKey.put(subincludeLabel, key); containingPkgLookupKeys.add(ContainingPackageLookupValue.key(dirId)); } Map<SkyKey, 
ValueOrException3<BuildFileNotFoundException, InconsistentFilesystemException,
    FileSymlinkException>> containingPkgLookupValues = env.getValuesOrThrow(
        containingPkgLookupKeys, BuildFileNotFoundException.class,
        InconsistentFilesystemException.class, FileSymlinkException.class);
    // Skyframe restart: not all containing-package lookups are available yet.
    if (env.valuesMissing()) {
      return;
    }
    // Remove (and mark the package in error for) every target whose label crosses a
    // subpackage boundary. Copy the target set first since we mutate pkgBuilder inside.
    for (Target target : ImmutableSet.copyOf(pkgBuilder.getTargets())) {
      SkyKey key = targetToKey.get(target);
      if (!containingPkgLookupValues.containsKey(key)) {
        continue;
      }
      ContainingPackageLookupValue containingPackageLookupValue =
          getContainingPkgLookupValueAndPropagateInconsistentFilesystemExceptions(
              pkgId, containingPkgLookupValues.get(key), env);
      if (maybeAddEventAboutLabelCrossingSubpackage(pkgBuilder, pkgRoot, target.getLabel(),
          target.getLocation(), containingPackageLookupValue)) {
        pkgBuilder.removeTarget(target);
        pkgBuilder.setContainsErrors();
      }
    }
    // Same check for subinclude labels; there is no target to remove in this case.
    for (Label subincludeLabel : pkgBuilder.getSubincludeLabels()) {
      SkyKey key = subincludeToKey.get(subincludeLabel);
      if (!containingPkgLookupValues.containsKey(key)) {
        continue;
      }
      ContainingPackageLookupValue containingPackageLookupValue =
          getContainingPkgLookupValueAndPropagateInconsistentFilesystemExceptions(
              pkgId, containingPkgLookupValues.get(key), env);
      if (maybeAddEventAboutLabelCrossingSubpackage(pkgBuilder, pkgRoot, subincludeLabel,
          /*location=*/null, containingPackageLookupValue)) {
        pkgBuilder.setContainsErrors();
      }
    }
  }

  /**
   * Unwraps a containing-package lookup result.
   *
   * <p>May return null — in particular when the lookup failed with a
   * {@link BuildFileNotFoundException} or {@link FileSymlinkException}, in which case the error
   * is reported to the listener first. An {@link InconsistentFilesystemException} is rewrapped
   * as an {@link InternalInconsistentFilesystemException} and propagated to the caller.
   */
  @Nullable
  private static ContainingPackageLookupValue
      getContainingPkgLookupValueAndPropagateInconsistentFilesystemExceptions(
          PackageIdentifier packageIdentifier,
          ValueOrException3<BuildFileNotFoundException, InconsistentFilesystemException,
              FileSymlinkException> containingPkgLookupValueOrException,
          Environment env) throws InternalInconsistentFilesystemException {
    try {
      return (ContainingPackageLookupValue) containingPkgLookupValueOrException.get();
    } catch (BuildFileNotFoundException | FileSymlinkException e) {
      env.getListener().handle(Event.error(null, e.getMessage()));
      return null;
    } catch (InconsistentFilesystemException e) {
      throw new InternalInconsistentFilesystemException(packageIdentifier, e);
    }
  }

  /**
   * Checks whether {@code label} crosses the boundary of its containing subpackage and, if so,
   * adds an error event (with a suggested fix) to {@code pkgBuilder}.
   *
   * @return true if the label should be considered in error — either the containing-package
   *     lookup value is null, or the label crosses a subpackage boundary
   */
  private static boolean maybeAddEventAboutLabelCrossingSubpackage(
      Package.LegacyBuilder pkgBuilder, Path pkgRoot, Label label, @Nullable Location location,
      @Nullable ContainingPackageLookupValue containingPkgLookupValue) {
    if (containingPkgLookupValue == null) {
      return true;
    }
    if (!containingPkgLookupValue.hasContainingPackage()) {
      // The missing package here is a problem, but it's not an error from the perspective of
      // PackageFunction.
      return false;
    }
    PackageIdentifier containingPkg = containingPkgLookupValue.getContainingPackageName();
    if (containingPkg.equals(label.getPackageIdentifier())) {
      // The label does not cross a subpackage boundary.
      return false;
    }
    if (!containingPkg.getPathFragment().startsWith(
        label.getPackageIdentifier().getPathFragment())) {
      // This label is referencing an imaginary package, because the containing package should
      // extend the label's package: if the label is //a/b:c/d, the containing package could be
      // //a/b/c or //a/b, but should never be //a. Usually such errors will be caught earlier,
      // but in some exceptional cases (such as a Python-aware BUILD file catching its own io
      // exceptions), it reaches here, and we tolerate it.
      return false;
    }
    PathFragment labelNameFragment = new PathFragment(label.getName());
    String message = String.format("Label '%s' crosses boundary of subpackage '%s'",
        label, containingPkg);
    Path containingRoot = containingPkgLookupValue.getContainingPackageRoot();
    if (pkgRoot.equals(containingRoot)) {
      // Same package root: suggest the corrected label inside the subpackage.
      PathFragment labelNameInContainingPackage = labelNameFragment.subFragment(
          containingPkg.getPackageFragment().segmentCount()
              - label.getPackageFragment().segmentCount(),
          labelNameFragment.segmentCount());
      message += " (perhaps you meant to put the colon here: "
          + "'//" + containingPkg + ":" + labelNameInContainingPackage + "'?)";
    } else {
      // Different package roots: the subpackage's BUILD file may have been deleted under this
      // root, in which case --deleted_packages is the remedy.
      message += " (have you deleted " + containingPkg + "/BUILD? "
          + "If so, use the --deleted_packages=" + containingPkg + " option)";
    }
    pkgBuilder.addEvent(Event.error(location, message));
    return true;
  }

  /**
   * Constructs a {@link Package} object for the given package using legacy package loading.
   * Note that the returned package may be in error.
   *
   * <p>May return null if the computation has to be restarted.
   *
   * <p>Exactly one of {@code replacementContents} and {@code buildFileValue} will be
   * non-{@code null}. The former indicates that we have a faux BUILD file with the given
   * contents and the latter indicates that we have a legitimate BUILD file and should actually
   * do preprocessing.
*/
  @Nullable
  private Package.LegacyBuilder loadPackage(
      Package externalPkg,
      @Nullable String replacementContents,
      PackageIdentifier packageId,
      Path buildFilePath,
      @Nullable FileValue buildFileValue,
      PathFragment buildFileFragment,
      RuleVisibility defaultVisibility,
      List<Statement> preludeStatements,
      Environment env)
      throws InterruptedException, PackageFunctionException {
    // The partially-built package is cached so that work done before a Skyframe restart is not
    // redone when this function runs again.
    Package.LegacyBuilder pkgBuilder = packageFunctionCache.getIfPresent(packageId);
    if (pkgBuilder == null) {
      profiler.startTask(ProfilerTask.CREATE_PACKAGE, packageId.toString());
      try {
        Globber globber = packageFactory.createLegacyGlobber(buildFilePath.getParentDirectory(),
            packageId, packageLocator);
        Preprocessor.Result preprocessingResult = preprocessCache.getIfPresent(packageId);
        if (preprocessingResult == null) {
          if (showLoadingProgress.get()) {
            env.getListener().handle(Event.progress("Loading package: " + packageId));
          }
          // Even though we only open and read the file on a cache miss, note that the BUILD is
          // still parsed two times. Also, the preprocessor may suboptimally open and read it
          // again anyway.
          ParserInputSource inputSource;
          if (replacementContents == null) {
            // Legitimate BUILD file: read it from disk and run the preprocessor.
            long buildFileSize = Preconditions.checkNotNull(buildFileValue, packageId).getSize();
            try {
              inputSource = ParserInputSource.create(buildFilePath, buildFileSize);
            } catch (IOException e) {
              env.getListener().handle(Event.error(Location.fromFile(buildFilePath),
                  e.getMessage()));
              // Note that we did this work, so we should conservatively report this error as
              // transient.
              throw new PackageFunctionException(new BuildFileContainsErrorsException(
                  packageId, e.getMessage()), Transience.TRANSIENT);
            }
            try {
              preprocessingResult = packageFactory.preprocess(packageId, inputSource, globber);
            } catch (IOException e) {
              env.getListener().handle(Event.error(
                  Location.fromFile(buildFilePath),
                  "preprocessing failed: " + e.getMessage()));
              throw new PackageFunctionException(
                  new BuildFileContainsErrorsException(packageId, "preprocessing failed", e),
                  Transience.TRANSIENT);
            }
          } else {
            // Faux BUILD file: the contents were supplied directly, so no preprocessing is
            // performed.
            ParserInputSource replacementSource =
                ParserInputSource.create(replacementContents, buildFilePath.asFragment());
            preprocessingResult = Preprocessor.Result.noPreprocessing(replacementSource);
          }
          preprocessCache.put(packageId, preprocessingResult);
        }
        SkylarkImportResult importResult;
        try {
          importResult = discoverSkylarkImports(
              buildFilePath, buildFileFragment, packageId, env, preprocessingResult.result,
              preludeStatements);
        } catch (PackageFunctionException | InterruptedException e) {
          preprocessCache.invalidate(packageId);
          throw e;
        }
        if (importResult == null) {
          // Skyframe restart needed to compute the imports. The preprocessing result stays
          // cached so it can be reused when we run again.
          return null;
        }
        preprocessCache.invalidate(packageId);
        pkgBuilder = packageFactory.createPackageFromPreprocessingResult(externalPkg, packageId,
            buildFilePath, preprocessingResult, preprocessingResult.events, preludeStatements,
            importResult.importMap, importResult.fileDependencies, packageLocator,
            defaultVisibility, globber);
        numPackagesLoaded.incrementAndGet();
        packageFunctionCache.put(packageId, pkgBuilder);
      } finally {
        profiler.completeTask(ProfilerTask.CREATE_PACKAGE);
      }
    }
    return pkgBuilder;
  }

  private static class InternalInconsistentFilesystemException extends NoSuchPackageException {
    // True when the inconsistency was discovered by PackageFunction itself (second
    // constructor); false when it originated elsewhere (first constructor).
    private boolean isTransient;

    /**
     * Used to represent a filesystem inconsistency discovered outside the
     * {@link PackageFunction}.
     */
    public InternalInconsistentFilesystemException(PackageIdentifier packageIdentifier,
        InconsistentFilesystemException e) {
      super(packageIdentifier, e.getMessage(), e);
      // This is not a transient error from the perspective of the PackageFunction.
      this.isTransient = false;
    }

    /** Used to represent a filesystem inconsistency discovered by the {@link PackageFunction}. */
    public InternalInconsistentFilesystemException(PackageIdentifier packageIdentifier,
        String inconsistencyMessage) {
      this(packageIdentifier, new InconsistentFilesystemException(inconsistencyMessage));
      this.isTransient = true;
    }

    /** Whether this error should be reported as a transient one. */
    public boolean isTransient() {
      return isTransient;
    }
  }

  /** Signals an error encountered while dealing with the WORKSPACE file. */
  private static class BadWorkspaceFileException extends NoSuchPackageException {
    private BadWorkspaceFileException(String message) {
      super(
          Package.EXTERNAL_PACKAGE_IDENTIFIER,
          "Error encountered while dealing with the WORKSPACE file: " + message);
    }
  }

  /** Signals an error encountered while reading the prelude file. */
  private static class BadPreludeFileException extends NoSuchPackageException {
    private BadPreludeFileException(PackageIdentifier packageIdentifier, String message) {
      super(packageIdentifier, "Error encountered while reading the prelude file: " + message);
    }
  }

  /**
   * Used to declare all the exception types that can be wrapped in the exception thrown by
   * {@link PackageFunction#compute}.
   */
  private static class PackageFunctionException extends SkyFunctionException {
    public PackageFunctionException(NoSuchPackageException e, Transience transience) {
      super(e, transience);
    }
  }

  /** A simple value class to store the result of the Skylark imports. */
  private static final class SkylarkImportResult {
    // Maps an import path to the extension it loaded.
    private final Map<PathFragment, Extension> importMap;
    // File dependencies of the imports.
    private final ImmutableList<Label> fileDependencies;

    private SkylarkImportResult(
        Map<PathFragment, Extension> importMap,
        ImmutableList<Label> fileDependencies) {
      this.importMap = importMap;
      this.fileDependencies = fileDependencies;
    }
  }
}
src/main/java/com/google/devtools/build/lib/skyframe/PackageFunction.java
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import com.google.common.base.Preconditions; import com.google.common.cache.Cache; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.PackageIdentifier; import com.google.devtools.build.lib.cmdline.PackageIdentifier.RepositoryName; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.Location; import com.google.devtools.build.lib.events.StoredEventHandler; import com.google.devtools.build.lib.packages.BuildFileContainsErrorsException; import com.google.devtools.build.lib.packages.BuildFileNotFoundException; import com.google.devtools.build.lib.packages.CachingPackageLocator; import com.google.devtools.build.lib.packages.InvalidPackageNameException; import com.google.devtools.build.lib.packages.NoSuchPackageException; import com.google.devtools.build.lib.packages.Package; import com.google.devtools.build.lib.packages.Package.LegacyBuilder; import com.google.devtools.build.lib.packages.PackageFactory; import 
com.google.devtools.build.lib.packages.PackageFactory.Globber; import com.google.devtools.build.lib.packages.Preprocessor; import com.google.devtools.build.lib.packages.Preprocessor.Result; import com.google.devtools.build.lib.packages.RuleVisibility; import com.google.devtools.build.lib.packages.Target; import com.google.devtools.build.lib.profiler.Profiler; import com.google.devtools.build.lib.profiler.ProfilerTask; import com.google.devtools.build.lib.skyframe.ASTFileLookupValue.ASTLookupInputException; import com.google.devtools.build.lib.skyframe.GlobValue.InvalidGlobPatternException; import com.google.devtools.build.lib.skyframe.SkylarkImportLookupFunction.SkylarkImportFailedException; import com.google.devtools.build.lib.syntax.BuildFileAST; import com.google.devtools.build.lib.syntax.Environment.Extension; import com.google.devtools.build.lib.syntax.EvalException; import com.google.devtools.build.lib.syntax.ParserInputSource; import com.google.devtools.build.lib.syntax.Statement; import com.google.devtools.build.lib.util.Pair; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException; import com.google.devtools.build.skyframe.SkyFunctionException.Transience; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import com.google.devtools.build.skyframe.ValueOrException3; import com.google.devtools.build.skyframe.ValueOrException4; import com.google.devtools.build.skyframe.ValueOrExceptionUtils; import java.io.IOException; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import 
javax.annotation.Nullable; /** * A SkyFunction for {@link PackageValue}s. */ public class PackageFunction implements SkyFunction { private final PackageFactory packageFactory; private final CachingPackageLocator packageLocator; private final Cache<PackageIdentifier, Package.LegacyBuilder> packageFunctionCache; private final Cache<PackageIdentifier, Preprocessor.Result> preprocessCache; private final AtomicBoolean showLoadingProgress; private final AtomicInteger numPackagesLoaded; private final Profiler profiler = Profiler.instance(); private final PathFragment preludePath; // Not final only for testing. @Nullable private SkylarkImportLookupFunction skylarkImportLookupFunctionForInlining; static final PathFragment DEFAULTS_PACKAGE_NAME = new PathFragment("tools/defaults"); public PackageFunction( PackageFactory packageFactory, CachingPackageLocator pkgLocator, AtomicBoolean showLoadingProgress, Cache<PackageIdentifier, LegacyBuilder> packageFunctionCache, Cache<PackageIdentifier, Result> preprocessCache, AtomicInteger numPackagesLoaded, @Nullable SkylarkImportLookupFunction skylarkImportLookupFunctionForInlining) { this.skylarkImportLookupFunctionForInlining = skylarkImportLookupFunctionForInlining; // Can be null in tests. this.preludePath = packageFactory == null ? 
null : packageFactory.getRuleClassProvider().getPreludePath(); this.packageFactory = packageFactory; this.packageLocator = pkgLocator; this.showLoadingProgress = showLoadingProgress; this.packageFunctionCache = packageFunctionCache; this.preprocessCache = preprocessCache; this.numPackagesLoaded = numPackagesLoaded; } public void setSkylarkImportLookupFunctionForInliningForTesting( SkylarkImportLookupFunction skylarkImportLookupFunctionForInlining) { this.skylarkImportLookupFunctionForInlining = skylarkImportLookupFunctionForInlining; } private static void maybeThrowFilesystemInconsistency(PackageIdentifier packageIdentifier, Exception skyframeException, boolean packageWasInError) throws InternalInconsistentFilesystemException { if (!packageWasInError) { throw new InternalInconsistentFilesystemException(packageIdentifier, "Encountered error '" + skyframeException.getMessage() + "' but didn't encounter it when doing the same thing " + "earlier in the build"); } } /** * Marks the given dependencies, and returns those already present. Ignores any exception * thrown while building the dependency, except for filesystem inconsistencies. * * <p>We need to mark dependencies implicitly used by the legacy package loading code, but we * don't care about any skyframe errors since the package knows whether it's in error or not. */ private static Pair<? 
extends Map<PathFragment, PackageLookupValue>, Boolean>
      getPackageLookupDepsAndPropagateInconsistentFilesystemExceptions(
          PackageIdentifier packageIdentifier,
          Iterable<SkyKey> depKeys,
          Environment env,
          boolean packageWasInError) throws InternalInconsistentFilesystemException {
    Preconditions.checkState(
        Iterables.all(depKeys, SkyFunctions.isSkyFunction(SkyFunctions.PACKAGE_LOOKUP)), depKeys);
    boolean packageShouldBeInError = packageWasInError;
    ImmutableMap.Builder<PathFragment, PackageLookupValue> builder = ImmutableMap.builder();
    for (Map.Entry<SkyKey, ValueOrException3<BuildFileNotFoundException,
        InconsistentFilesystemException, FileSymlinkException>> entry :
            env.getValuesOrThrow(depKeys, BuildFileNotFoundException.class,
                InconsistentFilesystemException.class, FileSymlinkException.class).entrySet()) {
      PathFragment pkgName = ((PackageIdentifier) entry.getKey().argument()).getPackageFragment();
      try {
        PackageLookupValue value = (PackageLookupValue) entry.getValue().get();
        if (value != null) {
          builder.put(pkgName, value);
        }
      } catch (BuildFileNotFoundException e) {
        maybeThrowFilesystemInconsistency(packageIdentifier, e, packageWasInError);
      } catch (InconsistentFilesystemException e) {
        throw new InternalInconsistentFilesystemException(packageIdentifier, e);
      } catch (FileSymlinkException e) {
        // Legacy doesn't detect symlink cycles.
        packageShouldBeInError = true;
      }
    }
    return Pair.of(builder.build(), packageShouldBeInError);
  }

  /**
   * Declares dependencies on the given FILE-valued keys after the fact, ignoring errors that
   * legacy loading already handled. IOExceptions trigger a filesystem-inconsistency check, an
   * {@link InconsistentFilesystemException} is rewrapped and propagated, and a symlink cycle
   * (which legacy loading does not detect) marks the package as in error.
   *
   * @return whether the package should now be considered in error
   */
  private static boolean markFileDepsAndPropagateInconsistentFilesystemExceptions(
      PackageIdentifier packageIdentifier, Iterable<SkyKey> depKeys, Environment env,
      boolean packageWasInError) throws InternalInconsistentFilesystemException {
    Preconditions.checkState(
        Iterables.all(depKeys, SkyFunctions.isSkyFunction(SkyFunctions.FILE)), depKeys);
    boolean packageShouldBeInError = packageWasInError;
    for (Map.Entry<SkyKey, ValueOrException3<IOException, FileSymlinkException,
        InconsistentFilesystemException>> entry : env.getValuesOrThrow(depKeys, IOException.class,
            FileSymlinkException.class, InconsistentFilesystemException.class).entrySet()) {
      try {
        entry.getValue().get();
      } catch (IOException e) {
        maybeThrowFilesystemInconsistency(packageIdentifier, e, packageWasInError);
      } catch (FileSymlinkException e) {
        // Legacy doesn't detect symlink cycles.
        packageShouldBeInError = true;
      } catch (InconsistentFilesystemException e) {
        throw new InternalInconsistentFilesystemException(packageIdentifier, e);
      }
    }
    return packageShouldBeInError;
  }

  /**
   * Declares dependencies on the given GLOB-valued keys after the fact, with the same error
   * handling as {@link #markFileDepsAndPropagateInconsistentFilesystemExceptions}.
   *
   * @return whether the package should now be considered in error
   */
  private static boolean markGlobDepsAndPropagateInconsistentFilesystemExceptions(
      PackageIdentifier packageIdentifier, Iterable<SkyKey> depKeys, Environment env,
      boolean packageWasInError) throws InternalInconsistentFilesystemException {
    Preconditions.checkState(
        Iterables.all(depKeys, SkyFunctions.isSkyFunction(SkyFunctions.GLOB)), depKeys);
    boolean packageShouldBeInError = packageWasInError;
    for (Map.Entry<SkyKey, ValueOrException4<IOException, BuildFileNotFoundException,
        FileSymlinkException, InconsistentFilesystemException>> entry :
        env.getValuesOrThrow(depKeys, IOException.class, BuildFileNotFoundException.class,
            FileSymlinkException.class, InconsistentFilesystemException.class).entrySet()) {
      try {
        entry.getValue().get();
      } catch (IOException | BuildFileNotFoundException e) {
        maybeThrowFilesystemInconsistency(packageIdentifier, e, packageWasInError);
      } catch (FileSymlinkException e) {
        // Legacy doesn't detect symlink cycles.
        packageShouldBeInError = true;
      } catch (InconsistentFilesystemException e) {
        throw new InternalInconsistentFilesystemException(packageIdentifier, e);
      }
    }
    return packageShouldBeInError;
  }

  /**
   * Marks dependencies implicitly used by legacy package loading code, after the fact. Note that
   * the given package might already be in error.
   *
   * <p>Any skyframe exceptions encountered here are ignored, as similar errors should have
   * already been encountered by legacy package loading (if not, then the filesystem is
   * inconsistent).
   */
  private static boolean markDependenciesAndPropagateInconsistentFilesystemExceptions(
      Environment env, Collection<Pair<String, Boolean>> globPatterns,
      Map<Label, Path> subincludes, PackageIdentifier packageIdentifier, boolean containsErrors)
      throws InternalInconsistentFilesystemException {
    boolean packageShouldBeInError = containsErrors;
    // TODO(bazel-team): This means that many packages will have to be preprocessed twice. Ouch!
    // We need a better continuation mechanism to avoid repeating work. [skyframe-loading]
    // TODO(bazel-team): It would be preferable to perform I/O from the package preprocessor via
    // Skyframe rather than add (potentially incomplete) dependencies after the fact.
    // [skyframe-loading]
    Set<SkyKey> subincludePackageLookupDepKeys = Sets.newHashSet();
    for (Label label : subincludes.keySet()) {
      // Declare a dependency on the package lookup for the package giving access to the label.
      subincludePackageLookupDepKeys.add(PackageLookupValue.key(label.getPackageIdentifier()));
    }
    Pair<?
extends Map<PathFragment, PackageLookupValue>, Boolean> subincludePackageLookupResult = getPackageLookupDepsAndPropagateInconsistentFilesystemExceptions( packageIdentifier, subincludePackageLookupDepKeys, env, containsErrors); Map<PathFragment, PackageLookupValue> subincludePackageLookupDeps = subincludePackageLookupResult.getFirst(); packageShouldBeInError |= subincludePackageLookupResult.getSecond(); List<SkyKey> subincludeFileDepKeys = Lists.newArrayList(); for (Entry<Label, Path> subincludeEntry : subincludes.entrySet()) { // Ideally, we would have a direct dependency on the target with the given label, but then // subincluding a file from the same package will cause a dependency cycle, since targets // depend on their containing packages. Label label = subincludeEntry.getKey(); PackageLookupValue subincludePackageLookupValue = subincludePackageLookupDeps.get(label.getPackageFragment()); if (subincludePackageLookupValue != null) { // Declare a dependency on the actual file that was subincluded. Path subincludeFilePath = subincludeEntry.getValue(); if (subincludeFilePath != null && !subincludePackageLookupValue.packageExists()) { // Legacy blaze puts a non-null path when only when the package does indeed exist. throw new InternalInconsistentFilesystemException( packageIdentifier, String.format( "Unexpected package in %s. Was it modified during the build?", subincludeFilePath)); } if (subincludePackageLookupValue.packageExists()) { // Sanity check for consistency of Skyframe and legacy blaze. Path subincludeFilePathSkyframe = subincludePackageLookupValue.getRoot().getRelative(label.toPathFragment()); if (subincludeFilePath != null && !subincludeFilePathSkyframe.equals(subincludeFilePath)) { throw new InternalInconsistentFilesystemException( packageIdentifier, String.format( "Inconsistent package location for %s: '%s' vs '%s'. 
" + "Was the source tree modified during the build?", label.getPackageFragment(), subincludeFilePathSkyframe, subincludeFilePath)); } // The actual file may be under a different package root than the package being // constructed. SkyKey subincludeSkyKey = FileValue.key( RootedPath.toRootedPath( subincludePackageLookupValue.getRoot(), label.getPackageFragment().getRelative(label.getName()))); subincludeFileDepKeys.add(subincludeSkyKey); } } } packageShouldBeInError |= markFileDepsAndPropagateInconsistentFilesystemExceptions( packageIdentifier, subincludeFileDepKeys, env, containsErrors); // TODO(bazel-team): In the long term, we want to actually resolve the glob patterns within // Skyframe. For now, just logging the glob requests provides correct incrementality and // adequate performance. List<SkyKey> globDepKeys = Lists.newArrayList(); for (Pair<String, Boolean> globPattern : globPatterns) { String pattern = globPattern.getFirst(); boolean excludeDirs = globPattern.getSecond(); SkyKey globSkyKey; try { globSkyKey = GlobValue.key(packageIdentifier, pattern, excludeDirs, PathFragment.EMPTY_FRAGMENT); } catch (InvalidGlobPatternException e) { // Globs that make it to pkg.getGlobPatterns() should already be filtered for errors. throw new IllegalStateException(e); } globDepKeys.add(globSkyKey); } packageShouldBeInError |= markGlobDepsAndPropagateInconsistentFilesystemExceptions( packageIdentifier, globDepKeys, env, containsErrors); return packageShouldBeInError; } /** * Adds a dependency on the WORKSPACE file, representing it as a special type of package. * @throws PackageFunctionException if there is an error computing the workspace file or adding * its rules to the //external package. 
*/ private SkyValue getExternalPackage(Environment env, Path packageLookupPath) throws PackageFunctionException { RootedPath workspacePath = RootedPath.toRootedPath( packageLookupPath, new PathFragment("WORKSPACE")); SkyKey workspaceKey = PackageValue.workspaceKey(workspacePath); PackageValue workspace = null; try { workspace = (PackageValue) env.getValueOrThrow(workspaceKey, IOException.class, FileSymlinkException.class, InconsistentFilesystemException.class, EvalException.class); } catch (IOException | FileSymlinkException | InconsistentFilesystemException | EvalException e) { throw new PackageFunctionException(new BadWorkspaceFileException(e.getMessage()), Transience.PERSISTENT); } if (workspace == null) { return null; } Package pkg = workspace.getPackage(); Event.replayEventsOn(env.getListener(), pkg.getEvents()); return new PackageValue(pkg); } @Override public SkyValue compute(SkyKey key, Environment env) throws PackageFunctionException, InterruptedException { PackageIdentifier packageId = (PackageIdentifier) key.argument(); PathFragment packageNameFragment = packageId.getPackageFragment(); String packageName = packageNameFragment.getPathString(); SkyKey packageLookupKey = PackageLookupValue.key(packageId); PackageLookupValue packageLookupValue; try { packageLookupValue = (PackageLookupValue) env.getValueOrThrow(packageLookupKey, BuildFileNotFoundException.class, InconsistentFilesystemException.class); } catch (BuildFileNotFoundException e) { throw new PackageFunctionException(e, Transience.PERSISTENT); } catch (InconsistentFilesystemException e) { // This error is not transient from the perspective of the PackageFunction. 
throw new PackageFunctionException( new InternalInconsistentFilesystemException(packageId, e), Transience.PERSISTENT); } if (packageLookupValue == null) { return null; } if (!packageLookupValue.packageExists()) { switch (packageLookupValue.getErrorReason()) { case NO_BUILD_FILE: case DELETED_PACKAGE: case NO_EXTERNAL_PACKAGE: throw new PackageFunctionException(new BuildFileNotFoundException(packageId, packageLookupValue.getErrorMsg()), Transience.PERSISTENT); case INVALID_PACKAGE_NAME: throw new PackageFunctionException(new InvalidPackageNameException(packageId, packageLookupValue.getErrorMsg()), Transience.PERSISTENT); default: // We should never get here. throw new IllegalStateException(); } } if (packageId.equals(Package.EXTERNAL_PACKAGE_IDENTIFIER)) { return getExternalPackage(env, packageLookupValue.getRoot()); } SkyKey externalPackageKey = PackageValue.key(Package.EXTERNAL_PACKAGE_IDENTIFIER); PackageValue externalPackage = (PackageValue) env.getValue(externalPackageKey); if (externalPackage == null) { return null; } Package externalPkg = externalPackage.getPackage(); if (externalPkg.containsErrors()) { throw new PackageFunctionException( new BuildFileContainsErrorsException(Package.EXTERNAL_PACKAGE_IDENTIFIER), Transience.PERSISTENT); } boolean isDefaultsPackage = packageNameFragment.equals(DEFAULTS_PACKAGE_NAME) && packageId.getRepository().isDefault(); PathFragment buildFileFragment = packageNameFragment.getChild("BUILD"); RootedPath buildFileRootedPath = RootedPath.toRootedPath(packageLookupValue.getRoot(), buildFileFragment); FileValue buildFileValue = null; if (!isDefaultsPackage) { try { buildFileValue = (FileValue) env.getValueOrThrow(FileValue.key(buildFileRootedPath), IOException.class, FileSymlinkException.class, InconsistentFilesystemException.class); } catch (IOException | FileSymlinkException | InconsistentFilesystemException e) { throw new IllegalStateException("Package lookup succeeded but encountered error when " + "getting FileValue for 
BUILD file directly.", e); } if (buildFileValue == null) { return null; } Preconditions.checkState(buildFileValue.exists(), "Package lookup succeeded but BUILD file doesn't exist"); } Path buildFilePath = buildFileRootedPath.asPath(); String replacementContents = null; if (isDefaultsPackage) { replacementContents = PrecomputedValue.DEFAULTS_PACKAGE_CONTENTS.get(env); if (replacementContents == null) { return null; } } RuleVisibility defaultVisibility = PrecomputedValue.DEFAULT_VISIBILITY.get(env); if (defaultVisibility == null) { return null; } ASTFileLookupValue astLookupValue = null; SkyKey astLookupKey = ASTFileLookupValue.key( PackageIdentifier.createInDefaultRepo(preludePath)); try { astLookupValue = (ASTFileLookupValue) env.getValueOrThrow(astLookupKey, ErrorReadingSkylarkExtensionException.class, InconsistentFilesystemException.class); } catch (ErrorReadingSkylarkExtensionException | InconsistentFilesystemException e) { throw new PackageFunctionException(new BadPreludeFileException(packageId, e.getMessage()), Transience.PERSISTENT); } if (astLookupValue == null) { return null; } List<Statement> preludeStatements = astLookupValue.getAST() == null ? ImmutableList.<Statement>of() : astLookupValue.getAST().getStatements(); ParserInputSource inputSource; if (replacementContents != null) { inputSource = ParserInputSource.create(replacementContents, buildFileFragment); } else { // Load the BUILD file AST and handle Skylark dependencies. This way BUILD files are // only loaded twice if there are unavailable Skylark or package dependencies or an // IOException occurs. Note that the BUILD files are still parsed two times. try { if (showLoadingProgress.get() && packageFunctionCache.getIfPresent(packageId) == null) { // TODO(bazel-team): don't duplicate the loading message if there are unavailable // Skylark dependencies. 
env.getListener().handle(Event.progress("Loading package: " + packageName)); } inputSource = ParserInputSource.create(buildFilePath, buildFileValue.getSize()); } catch (IOException e) { env.getListener().handle(Event.error(Location.fromFile(buildFilePath), e.getMessage())); // Note that we did this work, so we should conservatively report this error as transient. throw new PackageFunctionException(new BuildFileContainsErrorsException( packageId, e.getMessage()), Transience.TRANSIENT); } } Package.LegacyBuilder legacyPkgBuilder = loadPackage( externalPkg, inputSource, replacementContents, packageId, buildFilePath, buildFileFragment, defaultVisibility, preludeStatements, env); if (legacyPkgBuilder == null) { return null; } legacyPkgBuilder.buildPartial(); try { handleLabelsCrossingSubpackagesAndPropagateInconsistentFilesystemExceptions( packageLookupValue.getRoot(), packageId, legacyPkgBuilder, env); } catch (InternalInconsistentFilesystemException e) { packageFunctionCache.invalidate(packageId); throw new PackageFunctionException(e, e.isTransient() ? Transience.TRANSIENT : Transience.PERSISTENT); } if (env.valuesMissing()) { // The package we just loaded will be in the {@code packageFunctionCache} next when this // SkyFunction is called again. return null; } Collection<Pair<String, Boolean>> globPatterns = legacyPkgBuilder.getGlobPatterns(); Map<Label, Path> subincludes = legacyPkgBuilder.getSubincludes(); Event.replayEventsOn(env.getListener(), legacyPkgBuilder.getEvents()); boolean packageShouldBeConsideredInError; try { packageShouldBeConsideredInError = markDependenciesAndPropagateInconsistentFilesystemExceptions( env, globPatterns, subincludes, packageId, legacyPkgBuilder.containsErrors()); } catch (InternalInconsistentFilesystemException e) { packageFunctionCache.invalidate(packageId); throw new PackageFunctionException(e, e.isTransient() ? 
Transience.TRANSIENT : Transience.PERSISTENT); } if (env.valuesMissing()) { return null; } if (packageShouldBeConsideredInError) { legacyPkgBuilder.setContainsErrors(); } Package pkg = legacyPkgBuilder.finishBuild(); // We know this SkyFunction will not be called again, so we can remove the cache entry. packageFunctionCache.invalidate(packageId); return new PackageValue(pkg); } // TODO(bazel-team): this should take the AST so we don't parse the file twice. @Nullable private SkylarkImportResult discoverSkylarkImports( Path buildFilePath, PathFragment buildFileFragment, PackageIdentifier packageId, Environment env, ParserInputSource inputSource, List<Statement> preludeStatements) throws PackageFunctionException, InterruptedException { StoredEventHandler eventHandler = new StoredEventHandler(); BuildFileAST buildFileAST = BuildFileAST.parseBuildFile( inputSource, preludeStatements, eventHandler, /* parse python */ false); SkylarkImportResult importResult; if (eventHandler.hasErrors()) { importResult = new SkylarkImportResult( ImmutableMap.<PathFragment, Extension>of(), ImmutableList.<Label>of()); } else { importResult = fetchImportsFromBuildFile(buildFilePath, buildFileFragment, packageId, buildFileAST, env); } return importResult; } static SkyKey getImportKey( Map.Entry<Location, PathFragment> entry, PathFragment preludePath, PathFragment buildFileFragment, PackageIdentifier packageId) throws ASTLookupInputException { PathFragment importFile = entry.getValue(); // HACK: The prelude sometimes contains load() statements, which need to be resolved // relative to the prelude file. However, we don't have a good way to tell "this should come // from the main repository" in a load() statement, and we don't have a good way to tell if // a load() statement comes from the prelude, since we just prepend those statements before // the actual BUILD file. So we use this evil .endsWith() statement to figure it out. 
RepositoryName repository = entry.getKey().getPath().endsWith(preludePath) ? PackageIdentifier.DEFAULT_REPOSITORY_NAME : packageId.getRepository(); return SkylarkImportLookupValue.key(repository, buildFileFragment, importFile); } private static SkyKey getImportKeyAndMaybeThrowException( Map.Entry<Location, PathFragment> entry, PathFragment preludePath, PathFragment buildFileFragment, PackageIdentifier packageId) throws PackageFunctionException { try { return getImportKey(entry, preludePath, buildFileFragment, packageId); } catch (ASTLookupInputException e) { // The load syntax is bad in the BUILD file so BuildFileContainsErrorsException is OK. throw new PackageFunctionException( new BuildFileContainsErrorsException(packageId, e.getMessage()), Transience.PERSISTENT); } } /** * Fetch the skylark loads for this BUILD file. If any of them haven't been computed yet, * returns null. */ @Nullable private SkylarkImportResult fetchImportsFromBuildFile( Path buildFilePath, PathFragment buildFileFragment, PackageIdentifier packageId, BuildFileAST buildFileAST, Environment env) throws PackageFunctionException, InterruptedException { ImmutableMap<Location, PathFragment> imports = buildFileAST.getImports(); Map<PathFragment, Extension> importMap = Maps.newHashMapWithExpectedSize(imports.size()); ImmutableList.Builder<SkylarkFileDependency> fileDependencies = ImmutableList.builder(); Map< SkyKey, ValueOrException4< SkylarkImportFailedException, InconsistentFilesystemException, ASTLookupInputException, BuildFileNotFoundException>> skylarkImportMap; Map<SkyKey, PathFragment> skylarkImports = Maps.newHashMapWithExpectedSize(imports.size()); if (skylarkImportLookupFunctionForInlining != null) { skylarkImportMap = Maps.newHashMapWithExpectedSize(imports.size()); for (Map.Entry<Location, PathFragment> entry : imports.entrySet()) { SkyKey importKey = getImportKeyAndMaybeThrowException(entry, preludePath, buildFileFragment, packageId); skylarkImports.put(importKey, entry.getValue()); 
ValueOrException4< SkylarkImportFailedException, InconsistentFilesystemException, ASTLookupInputException, BuildFileNotFoundException> lookupResult; try { SkyValue value = skylarkImportLookupFunctionForInlining.computeWithInlineCalls(importKey, env); if (value == null) { Preconditions.checkState(env.valuesMissing(), importKey); // Don't give up on computing. This is against the Skyframe contract, but we don't want // to pay the price of serializing all these calls, since they are fundamentally // independent. lookupResult = ValueOrExceptionUtils.ofNullValue(); } else { lookupResult = ValueOrExceptionUtils.ofValue(value); } } catch (SkyFunctionException e) { Exception cause = e.getCause(); if (cause instanceof SkylarkImportFailedException) { lookupResult = ValueOrExceptionUtils.ofExn1((SkylarkImportFailedException) cause); } else if (cause instanceof InconsistentFilesystemException) { lookupResult = ValueOrExceptionUtils.ofExn2((InconsistentFilesystemException) cause); } else if (cause instanceof ASTLookupInputException) { lookupResult = ValueOrExceptionUtils.ofExn3((ASTLookupInputException) cause); } else if (cause instanceof BuildFileNotFoundException) { lookupResult = ValueOrExceptionUtils.ofExn4((BuildFileNotFoundException) cause); } else { throw new IllegalStateException("Unexpected type for " + importKey, e); } } skylarkImportMap.put(importKey, lookupResult); } } else { for (Map.Entry<Location, PathFragment> entry : imports.entrySet()) { skylarkImports.put( getImportKeyAndMaybeThrowException(entry, preludePath, buildFileFragment, packageId), entry.getValue()); } skylarkImportMap = env.getValuesOrThrow( skylarkImports.keySet(), SkylarkImportFailedException.class, InconsistentFilesystemException.class, ASTLookupInputException.class, BuildFileNotFoundException.class); } for (Map.Entry< SkyKey, ValueOrException4< SkylarkImportFailedException, InconsistentFilesystemException, ASTLookupInputException, BuildFileNotFoundException>> entry : skylarkImportMap.entrySet()) 
{ SkylarkImportLookupValue importLookupValue; try { importLookupValue = (SkylarkImportLookupValue) entry.getValue().get(); } catch (SkylarkImportFailedException e) { env.getListener().handle(Event.error(Location.fromFile(buildFilePath), e.getMessage())); throw new PackageFunctionException( new BuildFileContainsErrorsException(packageId, e.getMessage()), Transience.PERSISTENT); } catch (InconsistentFilesystemException e) { throw new PackageFunctionException( new InternalInconsistentFilesystemException(packageId, e), Transience.PERSISTENT); } catch (ASTLookupInputException e) { // The load syntax is bad in the BUILD file so BuildFileContainsErrorsException is OK. throw new PackageFunctionException( new BuildFileContainsErrorsException(packageId, e.getMessage()), Transience.PERSISTENT); } catch (BuildFileNotFoundException e) { throw new PackageFunctionException(e, Transience.PERSISTENT); } if (importLookupValue == null) { Preconditions.checkState(env.valuesMissing(), entry); } else { importMap.put( skylarkImports.get(entry.getKey()), importLookupValue.getEnvironmentExtension()); fileDependencies.add(importLookupValue.getDependency()); } } if (env.valuesMissing()) { // There are unavailable Skylark dependencies. 
return null; } return new SkylarkImportResult(importMap, transitiveClosureOfLabels(fileDependencies.build())); } private ImmutableList<Label> transitiveClosureOfLabels( ImmutableList<SkylarkFileDependency> immediateDeps) { Set<Label> transitiveClosure = Sets.newHashSet(); transitiveClosureOfLabels(immediateDeps, transitiveClosure); return ImmutableList.copyOf(transitiveClosure); } private void transitiveClosureOfLabels( ImmutableList<SkylarkFileDependency> immediateDeps, Set<Label> transitiveClosure) { for (SkylarkFileDependency dep : immediateDeps) { if (transitiveClosure.add(dep.getLabel())) { transitiveClosureOfLabels(dep.getDependencies(), transitiveClosure); } } } @Nullable @Override public String extractTag(SkyKey skyKey) { return null; } private static void handleLabelsCrossingSubpackagesAndPropagateInconsistentFilesystemExceptions( Path pkgRoot, PackageIdentifier pkgId, Package.LegacyBuilder pkgBuilder, Environment env) throws InternalInconsistentFilesystemException { Set<SkyKey> containingPkgLookupKeys = Sets.newHashSet(); Map<Target, SkyKey> targetToKey = new HashMap<>(); for (Target target : pkgBuilder.getTargets()) { PathFragment dir = target.getLabel().toPathFragment().getParentDirectory(); PackageIdentifier dirId = PackageIdentifier.create(pkgId.getRepository(), dir); if (dir.equals(pkgId.getPackageFragment())) { continue; } SkyKey key = ContainingPackageLookupValue.key(dirId); targetToKey.put(target, key); containingPkgLookupKeys.add(key); } Map<Label, SkyKey> subincludeToKey = new HashMap<>(); for (Label subincludeLabel : pkgBuilder.getSubincludeLabels()) { PathFragment dir = subincludeLabel.toPathFragment().getParentDirectory(); PackageIdentifier dirId = PackageIdentifier.create(pkgId.getRepository(), dir); if (dir.equals(pkgId.getPackageFragment())) { continue; } SkyKey key = ContainingPackageLookupValue.key(dirId); subincludeToKey.put(subincludeLabel, key); containingPkgLookupKeys.add(ContainingPackageLookupValue.key(dirId)); } Map<SkyKey, 
ValueOrException3<BuildFileNotFoundException, InconsistentFilesystemException, FileSymlinkException>> containingPkgLookupValues = env.getValuesOrThrow( containingPkgLookupKeys, BuildFileNotFoundException.class, InconsistentFilesystemException.class, FileSymlinkException.class); if (env.valuesMissing()) { return; } for (Target target : ImmutableSet.copyOf(pkgBuilder.getTargets())) { SkyKey key = targetToKey.get(target); if (!containingPkgLookupValues.containsKey(key)) { continue; } ContainingPackageLookupValue containingPackageLookupValue = getContainingPkgLookupValueAndPropagateInconsistentFilesystemExceptions( pkgId, containingPkgLookupValues.get(key), env); if (maybeAddEventAboutLabelCrossingSubpackage(pkgBuilder, pkgRoot, target.getLabel(), target.getLocation(), containingPackageLookupValue)) { pkgBuilder.removeTarget(target); pkgBuilder.setContainsErrors(); } } for (Label subincludeLabel : pkgBuilder.getSubincludeLabels()) { SkyKey key = subincludeToKey.get(subincludeLabel); if (!containingPkgLookupValues.containsKey(key)) { continue; } ContainingPackageLookupValue containingPackageLookupValue = getContainingPkgLookupValueAndPropagateInconsistentFilesystemExceptions( pkgId, containingPkgLookupValues.get(key), env); if (maybeAddEventAboutLabelCrossingSubpackage(pkgBuilder, pkgRoot, subincludeLabel, /*location=*/null, containingPackageLookupValue)) { pkgBuilder.setContainsErrors(); } } } @Nullable private static ContainingPackageLookupValue getContainingPkgLookupValueAndPropagateInconsistentFilesystemExceptions( PackageIdentifier packageIdentifier, ValueOrException3<BuildFileNotFoundException, InconsistentFilesystemException, FileSymlinkException> containingPkgLookupValueOrException, Environment env) throws InternalInconsistentFilesystemException { try { return (ContainingPackageLookupValue) containingPkgLookupValueOrException.get(); } catch (BuildFileNotFoundException | FileSymlinkException e) { env.getListener().handle(Event.error(null, e.getMessage())); return 
null; } catch (InconsistentFilesystemException e) { throw new InternalInconsistentFilesystemException(packageIdentifier, e); } } private static boolean maybeAddEventAboutLabelCrossingSubpackage( Package.LegacyBuilder pkgBuilder, Path pkgRoot, Label label, @Nullable Location location, @Nullable ContainingPackageLookupValue containingPkgLookupValue) { if (containingPkgLookupValue == null) { return true; } if (!containingPkgLookupValue.hasContainingPackage()) { // The missing package here is a problem, but it's not an error from the perspective of // PackageFunction. return false; } PackageIdentifier containingPkg = containingPkgLookupValue.getContainingPackageName(); if (containingPkg.equals(label.getPackageIdentifier())) { // The label does not cross a subpackage boundary. return false; } if (!containingPkg.getPathFragment().startsWith( label.getPackageIdentifier().getPathFragment())) { // This label is referencing an imaginary package, because the containing package should // extend the label's package: if the label is //a/b:c/d, the containing package could be // //a/b/c or //a/b, but should never be //a. Usually such errors will be caught earlier, but // in some exceptional cases (such as a Python-aware BUILD file catching its own io // exceptions), it reaches here, and we tolerate it. 
return false; } PathFragment labelNameFragment = new PathFragment(label.getName()); String message = String.format("Label '%s' crosses boundary of subpackage '%s'", label, containingPkg); Path containingRoot = containingPkgLookupValue.getContainingPackageRoot(); if (pkgRoot.equals(containingRoot)) { PathFragment labelNameInContainingPackage = labelNameFragment.subFragment( containingPkg.getPackageFragment().segmentCount() - label.getPackageFragment().segmentCount(), labelNameFragment.segmentCount()); message += " (perhaps you meant to put the colon here: " + "'//" + containingPkg + ":" + labelNameInContainingPackage + "'?)"; } else { message += " (have you deleted " + containingPkg + "/BUILD? " + "If so, use the --deleted_packages=" + containingPkg + " option)"; } pkgBuilder.addEvent(Event.error(location, message)); return true; } /** * Constructs a {@link Package} object for the given package using legacy package loading. * Note that the returned package may be in error. * * <p>May return null if the computation has to be restarted. */ @Nullable private Package.LegacyBuilder loadPackage( Package externalPkg, ParserInputSource inputSource, @Nullable String replacementContents, PackageIdentifier packageId, Path buildFilePath, PathFragment buildFileFragment, RuleVisibility defaultVisibility, List<Statement> preludeStatements, Environment env) throws InterruptedException, PackageFunctionException { ParserInputSource replacementSource = replacementContents == null ? 
null : ParserInputSource.create(replacementContents, buildFilePath.asFragment()); Package.LegacyBuilder pkgBuilder = packageFunctionCache.getIfPresent(packageId); if (pkgBuilder == null) { profiler.startTask(ProfilerTask.CREATE_PACKAGE, packageId.toString()); try { Globber globber = packageFactory.createLegacyGlobber(buildFilePath.getParentDirectory(), packageId, packageLocator); Preprocessor.Result preprocessingResult = preprocessCache.getIfPresent(packageId); if (preprocessingResult == null) { try { preprocessingResult = replacementSource == null ? packageFactory.preprocess(packageId, inputSource, globber) : Preprocessor.Result.noPreprocessing(replacementSource); } catch (IOException e) { env .getListener() .handle( Event.error( Location.fromFile(buildFilePath), "preprocessing failed: " + e.getMessage())); throw new PackageFunctionException( new BuildFileContainsErrorsException(packageId, "preprocessing failed", e), Transience.TRANSIENT); } preprocessCache.put(packageId, preprocessingResult); } SkylarkImportResult importResult; try { importResult = discoverSkylarkImports( buildFilePath, buildFileFragment, packageId, env, preprocessingResult.result, preludeStatements); } catch (PackageFunctionException | InterruptedException e) { preprocessCache.invalidate(packageId); throw e; } if (importResult == null) { return null; } preprocessCache.invalidate(packageId); pkgBuilder = packageFactory.createPackageFromPreprocessingResult(externalPkg, packageId, buildFilePath, preprocessingResult, preprocessingResult.events, preludeStatements, importResult.importMap, importResult.fileDependencies, packageLocator, defaultVisibility, globber); numPackagesLoaded.incrementAndGet(); packageFunctionCache.put(packageId, pkgBuilder); } finally { profiler.completeTask(ProfilerTask.CREATE_PACKAGE); } } return pkgBuilder; } private static class InternalInconsistentFilesystemException extends NoSuchPackageException { private boolean isTransient; /** * Used to represent a filesystem 
inconsistency discovered outside the * {@link PackageFunction}. */ public InternalInconsistentFilesystemException(PackageIdentifier packageIdentifier, InconsistentFilesystemException e) { super(packageIdentifier, e.getMessage(), e); // This is not a transient error from the perspective of the PackageFunction. this.isTransient = false; } /** Used to represent a filesystem inconsistency discovered by the {@link PackageFunction}. */ public InternalInconsistentFilesystemException(PackageIdentifier packageIdentifier, String inconsistencyMessage) { this(packageIdentifier, new InconsistentFilesystemException(inconsistencyMessage)); this.isTransient = true; } public boolean isTransient() { return isTransient; } } private static class BadWorkspaceFileException extends NoSuchPackageException { private BadWorkspaceFileException(String message) { super( Package.EXTERNAL_PACKAGE_IDENTIFIER, "Error encountered while dealing with the WORKSPACE file: " + message); } } private static class BadPreludeFileException extends NoSuchPackageException { private BadPreludeFileException(PackageIdentifier packageIdentifier, String message) { super(packageIdentifier, "Error encountered while reading the prelude file: " + message); } } /** * Used to declare all the exception types that can be wrapped in the exception thrown by * {@link PackageFunction#compute}. */ private static class PackageFunctionException extends SkyFunctionException { public PackageFunctionException(NoSuchPackageException e, Transience transience) { super(e, transience); } } /** A simple value class to store the result of the Skylark imports.*/ private static final class SkylarkImportResult { private final Map<PathFragment, Extension> importMap; private final ImmutableList<Label> fileDependencies; private SkylarkImportResult( Map<PathFragment, Extension> importMap, ImmutableList<Label> fileDependencies) { this.importMap = importMap; this.fileDependencies = fileDependencies; } } }
Only open and read the BUILD file when we don't have a cached preprocessing result. This is a step in the right direction towards the goal of opening and reading each BUILD file exactly once. -- MOS_MIGRATED_REVID=105338761
src/main/java/com/google/devtools/build/lib/skyframe/PackageFunction.java
Only open and read the BUILD file when we don't have a cached preprocessing result.
Java
apache-2.0
a49fb0a1679e984f9678ed51e501a18da3cadca3
0
max2me/collect,srsudar/MamasDelRioAndroid,SDRC-India/collect,SDRC-India/collect,mapkon/collect,mapkon/collect
/* * Copyright (C) 2015 University of Washington * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.odk.collect.android.utilities; import android.text.Html; import java.util.regex.Matcher; import java.util.regex.Pattern; public class TextUtils { private static final String t = "TextUtils"; private TextUtils() { // static methods only } private static String markdownToHtml(String html) { // https://gist.github.com/jbroadway/2836900 // we try to be as strict as possible html = html.replaceAll("(\\*\\*|__)(.*?)(\\*\\*|__)", "<strong>$2</strong>"); html = html.replaceAll("(\\*|_)(.*?)(\\*|_)", "<em>$2</em>"); html = html.replaceAll("\\[([^\\[]+)\\]\\(([^\\)]+)\\)", "<a href=\"$2\">$1</a>"); StringBuffer headerOutput = new StringBuffer(); Matcher headerMatcher = Pattern.compile("(?m)^(#+)(.*)").matcher(html); while (headerMatcher.find()) { headerMatcher.appendReplacement(headerOutput, headerMatcher.quoteReplacement(createHeaderReplacement(headerMatcher))); } html = headerMatcher.appendTail(headerOutput).toString(); StringBuffer paragraphOutput = new StringBuffer(); Matcher paragraphMatcher = Pattern.compile("\\n([^\\n]+)\\n").matcher(html); while (paragraphMatcher.find()) { paragraphMatcher.appendReplacement(paragraphOutput, headerMatcher.quoteReplacement(createParagraphReplacement(paragraphMatcher))); } html = paragraphMatcher.appendTail(paragraphOutput).toString(); StringBuffer spanOutput = new StringBuffer(); Matcher spanMatcher = 
Pattern.compile("((&lt;)|<)span(.*?)((&gt;)|>)(.*?)((&lt;)|<)/span((&gt;)|>)").matcher(html); while (spanMatcher.find()) { spanMatcher.appendReplacement(spanOutput, headerMatcher.quoteReplacement(createSpanReplacement(spanMatcher))); } html = spanMatcher.appendTail(spanOutput).toString(); return html; } public static String createHeaderReplacement(Matcher matcher) { int level = matcher.group(1).length(); return "<h" + level + ">" + matcher.group(2).trim() + "</h" + level + ">\n"; } public static String createParagraphReplacement(Matcher matcher) { String line = matcher.group(1); String trimmed = line.trim(); if (trimmed.matches("^<\\/?(h|p)")) { return "\n" + line + "\n"; } return "\n<p>" + trimmed + "</p>\n"; } public static String createSpanReplacement(Matcher matcher) { String stylesText = matcher.group(3); stylesText = stylesText.replaceAll("style=[\"'](.*?)[\"']", "$1"); String[] styles = stylesText.trim().split(";"); StringBuffer stylesOutput = new StringBuffer(); for (int i = 0; i < styles.length; i++) { String[] stylesAttributes = styles[i].trim().split(":"); if (stylesAttributes[0].equals("color")) { stylesOutput.append(" color=\"" + stylesAttributes[1] + "\""); } if (stylesAttributes[0].equals("font-family")) { stylesOutput.append(" face=\"" + stylesAttributes[1] + "\""); } } return "<font" + stylesOutput + ">" + matcher.group(6).trim() + "</font>"; } // http://stackoverflow.com/a/10187511/152938 public static CharSequence trimTrailingWhitespace(CharSequence source) { if(source == null) return ""; int i = source.length(); // loop back to the first non-whitespace character while(--i >= 0 && Character.isWhitespace(source.charAt(i))) { } return source.subSequence(0, i+1); } public static CharSequence textToHtml(String text) { if ( text == null ) { return null; } return trimTrailingWhitespace(Html.fromHtml(markdownToHtml(text))); } }
collect_app/src/main/java/org/odk/collect/android/utilities/TextUtils.java
/* * Copyright (C) 2015 University of Washington * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.odk.collect.android.utilities; import android.text.Html; import android.text.Spanned; import java.util.regex.Matcher; import java.util.regex.Pattern; public class TextUtils { private static final String t = "TextUtils"; private TextUtils() { // static methods only } private static String markdownToHtml(String html) { // https://gist.github.com/jbroadway/2836900 // we try to be as strict as possible html = html.replaceAll("(\\*\\*|__)(.*?)(\\*\\*|__)", "<strong>$2</strong>"); html = html.replaceAll("(\\*|_)(.*?)(\\*|_)", "<em>$2</em>"); html = html.replaceAll("\\[([^\\[]+)\\]\\(([^\\)]+)\\)", "<a href=\"$2\">$1</a>"); StringBuffer headerOutput = new StringBuffer(); Matcher headerMatcher = Pattern.compile("(?m)^(#+)(.*)").matcher(html); while (headerMatcher.find()) { headerMatcher.appendReplacement(headerOutput, createHeaderReplacement(headerMatcher)); } html = headerMatcher.appendTail(headerOutput).toString(); StringBuffer paragraphOutput = new StringBuffer(); Matcher paragraphMatcher = Pattern.compile("\\n([^\\n]+)\\n").matcher(html); while (paragraphMatcher.find()) { paragraphMatcher.appendReplacement(paragraphOutput, createParagraphReplacement(paragraphMatcher)); } html = paragraphMatcher.appendTail(paragraphOutput).toString(); StringBuffer spanOutput = new StringBuffer(); Matcher spanMatcher = 
Pattern.compile("((&lt;)|<)span(.*?)((&gt;)|>)(.*?)((&lt;)|<)/span((&gt;)|>)").matcher(html); while (spanMatcher.find()) { spanMatcher.appendReplacement(spanOutput, createSpanReplacement(spanMatcher)); } html = spanMatcher.appendTail(spanOutput).toString(); return html; } public static String createHeaderReplacement(Matcher matcher) { int level = matcher.group(1).length(); return "<h" + level + ">" + matcher.group(2).trim() + "</h" + level + ">\n"; } public static String createParagraphReplacement(Matcher matcher) { String line = matcher.group(1); String trimmed = line.trim(); if (trimmed.matches("^<\\/?(h|p)")) { return "\n" + line + "\n"; } return "\n<p>" + trimmed + "</p>\n"; } public static String createSpanReplacement(Matcher matcher) { String stylesText = matcher.group(3); stylesText = stylesText.replaceAll("style=[\"'](.*?)[\"']", "$1"); String[] styles = stylesText.trim().split(";"); StringBuffer stylesOutput = new StringBuffer(); for (int i = 0; i < styles.length; i++) { String[] stylesAttributes = styles[i].trim().split(":"); if (stylesAttributes[0].equals("color")) { stylesOutput.append(" color=\"" + stylesAttributes[1] + "\""); } if (stylesAttributes[0].equals("font-family")) { stylesOutput.append(" face=\"" + stylesAttributes[1] + "\""); } } return "<font" + stylesOutput + ">" + matcher.group(6).trim() + "</font>"; } public static CharSequence textToHtml(String text) { // There's some terrible bug that displays all the text as the // opening tag if a tag is the first thing in the string // so we hack around it so it begins with something else // when we convert it if ( text == null ) { return null; } // terrible hack, just add some chars Spanned brokenHtml = Html.fromHtml("x" + markdownToHtml(text)); // after we have the good html, remove the chars CharSequence fixedHtml = brokenHtml.subSequence(1, brokenHtml.length()); return fixedHtml; } }
Fix for opendatakit/opendatakit#1224. Also trimming whitespace.
collect_app/src/main/java/org/odk/collect/android/utilities/TextUtils.java
Fix for opendatakit/opendatakit#1224. Also trimming whitespace.
Java
apache-2.0
fatal: invalid reference: FETCH_HEAD^
2a1193d4ed1a70e322614cea2c0ce0261eaa1d4f
128
patado/Sudoku
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package sudoku; /** * * @author patrick */ public class Admin { public static Grid ThroughList (Grid initialGrid){ for (int i=0 ; i<9 ; i++) //81 cells in total (9*9) for (int j=0 ; j<9 ; j++) initialGrid.checkCell(i, j); return initialGrid; } public static int checkIfFinished (Grid initialGrid){ int flag=0; for (int n=0 ; n<9 ; n++) for (int m=0 ; m<9 ; m++) if(initialGrid.getGrid(n, m)==0)//if any cell remains at 0, the resolution is not finished { flag=1; break; } return flag; } }
Admin.java
Console version
Admin.java
Console version
Java
apache-2.0
error: pathspec 'src/org/jgroups/protocols/FD_ALL.java' did not match any file(s) known to git
9e7ff71094a06ec22dff581c42b9fc043fe03bd6
1
slaskawi/JGroups,danberindei/JGroups,vjuranek/JGroups,ibrahimshbat/JGroups,pruivo/JGroups,kedzie/JGroups,tristantarrant/JGroups,deepnarsay/JGroups,ibrahimshbat/JGroups,TarantulaTechnology/JGroups,rvansa/JGroups,dimbleby/JGroups,pferraro/JGroups,ibrahimshbat/JGroups,Sanne/JGroups,pruivo/JGroups,tristantarrant/JGroups,rhusar/JGroups,pferraro/JGroups,kedzie/JGroups,belaban/JGroups,danberindei/JGroups,dimbleby/JGroups,rpelisse/JGroups,TarantulaTechnology/JGroups,danberindei/JGroups,vjuranek/JGroups,rpelisse/JGroups,slaskawi/JGroups,pferraro/JGroups,TarantulaTechnology/JGroups,ligzy/JGroups,belaban/JGroups,ligzy/JGroups,belaban/JGroups,deepnarsay/JGroups,rpelisse/JGroups,rhusar/JGroups,Sanne/JGroups,Sanne/JGroups,ligzy/JGroups,slaskawi/JGroups,rvansa/JGroups,pruivo/JGroups,dimbleby/JGroups,deepnarsay/JGroups,kedzie/JGroups,vjuranek/JGroups,ibrahimshbat/JGroups,rhusar/JGroups
package org.jgroups.protocols; import org.jgroups.stack.Protocol; import org.jgroups.*; import org.jgroups.util.*; import java.util.*; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.io.*; /** * Failure detection based on simple heartbeat protocol. Every member preiodically multicasts a heartbeat. Every member * also maintains a table of all members (minus itself). When data or a heartbeat from P are received, we reset the * timestamp for P to the current time. Periodically, we check for expired members, and suspect those. * @author Bela Ban * @version $Id: FD_ALL.java,v 1.1 2006/12/21 19:25:12 belaban Exp $ */ public class FD_ALL extends Protocol { /** Map of addresses and timestamps of last updates */ Map<Address,Long> timestamps=new ConcurrentHashMap<Address,Long>(); /** Number of milliseconds after which a HEARTBEAT is sent to the cluster */ long interval=3000; /** Number of milliseconds after which a node P is suspected if neither a heartbeat nor data were received from P */ long timeout=5000; Address local_addr=null; final List members=new ArrayList(); boolean shun=true; TimeScheduler timer=null; // task which multicasts HEARTBEAT message after 'interval' ms private HeartbeatSender heartbeat_sender=null; // task which checks for members exceeding timeout and suspects them private TimeoutChecker timeout_checker=null; private boolean tasks_running=false; protected int num_heartbeats=0; protected int num_suspect_events=0; final static String name="FD_ALL"; BoundedList suspect_history=new BoundedList(20); final Map<Address,Integer> invalid_pingers=new HashMap(7); // keys=Address, val=Integer (number of pings from suspected mbrs) final Lock lock=new ReentrantLock(); public String getName() {return FD_ALL.name;} public String getLocalAddress() {return local_addr != null? 
local_addr.toString() : "null";} public String getMembers() {return members != null? members.toString() : "null";} public int getNumberOfHeartbeatsSent() {return num_heartbeats;} public int getNumSuspectEventsGenerated() {return num_suspect_events;} public long getTimeout() {return timeout;} public void setTimeout(long timeout) {this.timeout=timeout;} public boolean isShun() {return shun;} public void setShun(boolean flag) {this.shun=flag;} public String printSuspectHistory() { StringBuilder sb=new StringBuilder(); for(Enumeration en=suspect_history.elements(); en.hasMoreElements();) { sb.append(new Date()).append(": ").append(en.nextElement()).append("\n"); } return sb.toString(); } public boolean setProperties(Properties props) { String str; super.setProperties(props); str=props.getProperty("timeout"); if(str != null) { timeout=Long.parseLong(str); props.remove("timeout"); } str=props.getProperty("interval"); if(str != null) { interval=Long.parseLong(str); props.remove("interval"); } str=props.getProperty("shun"); if(str != null) { shun=Boolean.valueOf(str).booleanValue(); props.remove("shun"); } if(props.size() > 0) { log.error("the following properties are not recognized: " + props); return false; } return true; } public void resetStats() { num_heartbeats=num_suspect_events=0; suspect_history.removeAll(); } public void init() throws Exception { if(stack != null && stack.timer != null) timer=stack.timer; else throw new Exception("timer cannot be retrieved from protocol stack"); } public void stop() { stopTasks(); } public void up(Event evt) { Message msg; Header hdr; Address sender; switch(evt.getType()) { case Event.SET_LOCAL_ADDRESS: local_addr=(Address)evt.getArg(); break; case Event.MSG: msg=(Message)evt.getArg(); hdr=(Header)msg.getHeader(name); // update(msg.getSrc()); // update when data is received too ? 
maybe a bit costly if(hdr == null) break; // message did not originate from FD_ALL layer, just pass up switch(hdr.type) { case Header.HEARTBEAT: // heartbeat request; send heartbeat ack sender=msg.getSrc(); if(trace) log.trace("received a a heartbeat from " + sender); // 2. Shun the sender of a HEARTBEAT message if that sender is not a member. This will cause // the sender to leave the group (and possibly rejoin it later) if(shun && sender != null && members != null && !members.contains(sender)) { shunInvalidHeartbeatSender(sender); break; } update(sender); // updates the heartbeat entry for 'sender' break; // don't pass up ! case Header.SUSPECT: if(trace) log.trace("[SUSPECT] suspect hdr is " + hdr); passDown(new Event(Event.SUSPECT, hdr.suspected_mbr)); passUp(new Event(Event.SUSPECT, hdr.suspected_mbr)); break; case Header.NOT_MEMBER: if(shun) { if(log.isDebugEnabled()) log.debug("[NOT_MEMBER] I'm being shunned; exiting"); passUp(new Event(Event.EXIT)); } break; } return; } passUp(evt); // pass up to the layer above us } public void down(Event evt) { switch(evt.getType()) { case Event.VIEW_CHANGE: View v=(View)evt.getArg(); handleViewChange(v); break; default: passDown(evt); break; } } private void startTasks() { startHeartbeatSender(); startTimeoutChecker(); tasks_running=true; } private void stopTasks() { stopTimeoutChecker(); stopHeartbeatSender(); tasks_running=false; } private void startTimeoutChecker() { lock.lock(); try { if(timeout_checker == null) { timeout_checker=new TimeoutChecker(); timer.add(timeout_checker); } } finally { lock.unlock(); } } private void startHeartbeatSender() { lock.lock(); try { if(heartbeat_sender == null) { heartbeat_sender=new HeartbeatSender(); timer.add(heartbeat_sender); } } finally { lock.unlock(); } } private void stopTimeoutChecker() { lock.lock(); try { if(timeout_checker != null) { timeout_checker.cancel(); timeout_checker=null; } } finally { lock.unlock(); } } private void stopHeartbeatSender() { lock.lock(); try { 
if(heartbeat_sender != null) { heartbeat_sender.cancel(); heartbeat_sender=null; } } finally { lock.unlock(); } } private void update(Address sender) { if(sender != null) timestamps.put(sender, Long.valueOf(System.currentTimeMillis())); } private void handleViewChange(View v) { Vector mbrs=v.getMembers(); members.clear(); members.addAll(mbrs); Set keys=timestamps.keySet(); keys.retainAll(mbrs); // remove all nodes which have left the cluster for(Iterator it=mbrs.iterator(); it.hasNext();) { // and add new members Address mbr=(Address)it.next(); if(!timestamps.containsKey(mbr)) { timestamps.put(mbr, Long.valueOf(System.currentTimeMillis())); } } if(!tasks_running && members.size() > 1) startTasks(); else if(tasks_running && members.size() < 2) stopTasks(); } /** * If sender is not a member, send a NOT_MEMBER to sender (after n pings received) */ private void shunInvalidHeartbeatSender(Address sender) { int num_pings=0; Message shun_msg; if(invalid_pingers.containsKey(sender)) { num_pings=invalid_pingers.get(sender).intValue(); if(num_pings >= 3) { if(log.isDebugEnabled()) log.debug(sender + " is not in " + members + " ! 
Shunning it"); shun_msg=new Message(sender, null, null); shun_msg.setFlag(Message.OOB); shun_msg.putHeader(name, new Header(Header.NOT_MEMBER)); passDown(new Event(Event.MSG, shun_msg)); invalid_pingers.remove(sender); } else { num_pings++; invalid_pingers.put(sender, new Integer(num_pings)); } } else { num_pings++; invalid_pingers.put(sender, Integer.valueOf(num_pings)); } } public static class Header extends org.jgroups.Header implements Streamable { public static final byte HEARTBEAT = 0; public static final byte SUSPECT = 1; public static final byte NOT_MEMBER = 2; // received as response by pinged mbr when we are not a member byte type=Header.HEARTBEAT; Address suspected_mbr=null; /** used for externalization */ public Header() { } public Header(byte type) { this.type=type; } public Header(byte type, Address suspect) { this(type); this.suspected_mbr=suspect; } public String toString() { switch(type) { case FD_ALL.Header.HEARTBEAT: return "heartbeat"; case FD_ALL.Header.SUSPECT: return "SUSPECT (suspected_mbr=" + suspected_mbr + ")"; case FD_ALL.Header.NOT_MEMBER: return "NOT_MEMBER"; default: return "unknown type (" + type + ")"; } } public void writeExternal(ObjectOutput out) throws IOException { out.writeByte(type); out.writeObject(suspected_mbr); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { type=in.readByte(); suspected_mbr=(Address)in.readObject(); } public long size() { int retval=Global.BYTE_SIZE; // type retval+=Util.size(suspected_mbr); return retval; } public void writeTo(DataOutputStream out) throws IOException { out.writeByte(type); Util.writeAddress(suspected_mbr, out); } public void readFrom(DataInputStream in) throws IOException, IllegalAccessException, InstantiationException { type=in.readByte(); suspected_mbr=Util.readAddress(in); } } /** * Class which periodically multicasts a HEARTBEAT message to the cluster */ class HeartbeatSender implements TimeScheduler.CancellableTask { boolean started=true; 
public void cancel() { started=false; } public boolean cancelled() { return !started; } public long nextInterval() { return interval; } public void run() { Message heartbeat=new Message(); // send to all heartbeat.setFlag(Message.OOB); Header hdr=new Header(Header.HEARTBEAT); heartbeat.putHeader(name, hdr); passDown(new Event(Event.MSG, heartbeat)); if(trace) log.trace("sent heartbeat to cluster"); num_heartbeats++; } public String toString() { return Boolean.toString(started); } } class TimeoutChecker extends HeartbeatSender { public void run() { Map.Entry entry; Object key; Long val; long current_time=System.currentTimeMillis(), diff; for(Iterator it=timestamps.entrySet().iterator(); it.hasNext();) { entry=(Map.Entry)it.next(); key=entry.getKey(); val=(Long)entry.getValue(); diff=current_time - val.longValue(); if(diff > timeout) { if(trace) log.trace("haven't received a heartbeat from " + key + " for " + diff + " ms, suspecting it"); suspect((Address)key); } } } void suspect(Address mbr) { Message suspect_msg=new Message(); suspect_msg.setFlag(Message.OOB); Header hdr=new Header(Header.SUSPECT, mbr); suspect_msg.putHeader(name, hdr); passDown(new Event(Event.MSG, suspect_msg)); num_suspect_events++; } } }
src/org/jgroups/protocols/FD_ALL.java
first cut
src/org/jgroups/protocols/FD_ALL.java
first cut
Java
apache-2.0
error: pathspec 'jdk/src/test/java/jdk/util/ArrayTest.java' did not match any file(s) known to git
b3e7e1d868f1ef518d55b4cce76641aa95129ab8
1
whyDK37/pinenut,whyDK37/pinenut
package jdk.util;

import java.util.ArrayList;
import java.util.List;

/**
 * Demonstrates the semantics of {@link List#toArray(Object[])}:
 * <ul>
 *   <li>array larger than list: the list is copied into the given array and the
 *       slot immediately after the last element is set to {@code null};</li>
 *   <li>array smaller than list: a brand-new, correctly sized array is
 *       allocated and returned;</li>
 *   <li>empty list with a zero-length array: a zero-length array is returned.</li>
 * </ul>
 * Created by why on 2016/8/7.
 */
public class ArrayTest {

    // Destination array handed to toArray(); pre-filled with "3"/"4" so the
    // demo shows how existing content is overwritten/null-terminated.
    // (Renamed from EMPTY_CHEESE_ARRAY — it was not empty until reassigned.)
    private static String[] cheeseArray = new String[2];

    private static List<String> cheesesInStock = new ArrayList<>();

    public static void main(String[] args) {
        // Case 1: array (length 2) is larger than list (size 1).
        cheesesInStock.add("1");
        cheeseArray[0] = "3";
        cheeseArray[1] = "4";
        String[] rs = cheesesInStock.toArray(cheeseArray);
        System.out.println("copy list item to array");
        printAll(rs);

        // Case 2: list (size 3) is larger than array (length 2) -> toArray()
        // allocates and returns a new array of the list's size.
        // (Label fixed: the copy direction is list -> array, not array -> list.)
        cheesesInStock.add("2");
        cheesesInStock.add("3");
        rs = cheesesInStock.toArray(cheeseArray);
        System.out.println("copy list items to array (new array allocated)");
        printAll(rs);

        // Case 3: empty list with a zero-length array.
        cheeseArray = new String[0];
        cheesesInStock.clear();
        rs = cheesesInStock.toArray(cheeseArray);
        System.out.println("return empty array");
        printAll(rs);
    }

    // Prints the array length, then its elements separated by single spaces
    // (same output format as the original inline loops).
    private static void printAll(String[] rs) {
        System.out.println("length:" + rs.length);
        for (String s : rs) {
            System.out.print(s + " ");
        }
    }
}
jdk/src/test/java/jdk/util/ArrayTest.java
toArray test
jdk/src/test/java/jdk/util/ArrayTest.java
toArray test
Java
apache-2.0
error: pathspec 'concurrency/src/main/java/module-info.java' did not match any file(s) known to git
c52888467f75e9897f23965f544fad352ee3c8d4
1
Torchmind/Mango
/**
 * Provides utilities which simplify the interaction with concurrency related APIs.
 *
 * @author <a href="mailto:[email protected]">Johannes Donath</a>
 */
module com.torchmind.mango.concurrency {
  // Public API surface of this module: lock-related helper types.
  exports com.torchmind.mango.concurrency.lock;

  // Compile-time-only dependency ("requires static"): SpotBugs annotations
  // are needed while compiling but are not required at run time.
  requires static com.github.spotbugs.annotations;
}
concurrency/src/main/java/module-info.java
Created a basic module-info.
concurrency/src/main/java/module-info.java
Created a basic module-info.
Java
apache-2.0
error: pathspec 'src/test/java/com/fishercoder/_623Test.java' did not match any file(s) known to git
2b0ff7342eb1e6de78a6936886a28702612581a0
1
fishercoder1534/Leetcode,fishercoder1534/Leetcode,fishercoder1534/Leetcode,fishercoder1534/Leetcode,fishercoder1534/Leetcode
package com.fishercoder;

import com.fishercoder.common.classes.TreeNode;
import com.fishercoder.common.utils.TreeUtils;
import com.fishercoder.solutions._623;
import org.junit.BeforeClass;
import org.junit.Test;

import java.util.Arrays;

import static org.junit.Assert.assertEquals;

/**
 * Tests for LeetCode 623 "Add One Row to Tree".
 *
 * Each case builds an input tree and an expected tree from level-order lists
 * (nulls mark missing children), then asserts that inserting a row of the
 * given value {@code v} at depth {@code d} produces the expected tree.
 *
 * Note: unused imports of {@code _1727} and {@code CommonUtils} were removed.
 */
public class _623Test {
    private static _623.Solution1 solution1;

    @BeforeClass
    public static void setup() {
        solution1 = new _623.Solution1();
    }

    @Test
    public void test1() {
        // Insert a row of 1s at depth 2 of a full-ish tree.
        TreeNode expected = TreeUtils.constructBinaryTree(
                Arrays.asList(4, 1, 1, 2, null, null, 6, 3, 1, 5));
        TreeUtils.printBinaryTree(expected);
        TreeNode inputTree = TreeUtils.constructBinaryTree(Arrays.asList(4, 2, 6, 3, 1, 5));
        TreeUtils.printBinaryTree(inputTree);
        assertEquals(expected, solution1.addOneRow(inputTree, 1, 2));
    }

    @Test
    public void test2() {
        // Insert a row at depth 3 in a left-leaning tree.
        TreeNode expected = TreeUtils.constructBinaryTree(
                Arrays.asList(4, 2, null, 1, 1, 3, null, null, 1));
        TreeUtils.printBinaryTree(expected);
        TreeNode inputTree = TreeUtils.constructBinaryTree(Arrays.asList(4, 2, null, 3, 1));
        TreeUtils.printBinaryTree(inputTree);
        assertEquals(expected, solution1.addOneRow(inputTree, 1, 3));
    }

    @Test
    public void test3() {
        // Insert a row at depth 3 of a complete tree.
        TreeNode expected = TreeUtils.constructBinaryTree(
                Arrays.asList(4, 2, 5, 1, 1, 1, 1, 3, null, null, 1, 6, null, null, 7));
        TreeUtils.printBinaryTree(expected);
        TreeNode inputTree = TreeUtils.constructBinaryTree(Arrays.asList(4, 2, 5, 3, 1, 6, 7));
        TreeUtils.printBinaryTree(inputTree);
        TreeNode actual = solution1.addOneRow(inputTree, 1, 3);
        TreeUtils.printBinaryTree(actual);
        assertEquals(expected, actual);
    }
}
src/test/java/com/fishercoder/_623Test.java
add tests for 623
src/test/java/com/fishercoder/_623Test.java
add tests for 623
Java
apache-2.0
error: pathspec 'bboss-core/src/org/frameworkset/spi/IOCContainerHolder.java' did not match any file(s) known to git
a6ae8c0699cb62a6f933d3853c72e92dfebf6abc
1
bbossgroups/bboss,bbossgroups/bboss,bbossgroups/bboss,bbossgroups/bboss,bbossgroups/bboss
package org.frameworkset.spi;

/**
 * Copyright 2022 bboss
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * <p>Description: Holds the root IOC container instance for the current
 * application process so it can be reached from anywhere.</p>
 * <p></p>
 * <p>Copyright (c) 2020</p>
 * @Date 2022/5/20
 * @author biaoping.yin
 * @version 1.0
 */
public class IOCContainerHolder {

    // Process-wide root container; set once during application bootstrap.
    private static BaseApplicationContext applicationContext;

    /**
     * Initializes the process-wide container reference.
     *
     * @param applicationContext the root IOC container for this process
     */
    public static void setApplicationContext(BaseApplicationContext applicationContext) {
        IOCContainerHolder.applicationContext = applicationContext;
    }

    /**
     * Returns the root container, or {@code null} if it has not been set yet.
     */
    public static BaseApplicationContext getApplicationContext() {
        return applicationContext;
    }

    /**
     * Looks up a bean from the container.
     *
     * @param beanName name of the bean to resolve
     * @param type     expected bean type
     * @param <T>      bean type parameter
     * @return the resolved bean instance
     * @throws IllegalStateException if the container has not been initialized
     */
    public static <T> T getTBeanObject(String beanName, Class<T> type) {
        // Fail fast with a clear message instead of an opaque NullPointerException
        // when lookup happens before setApplicationContext() was called.
        if (applicationContext == null) {
            throw new IllegalStateException(
                    "IOC container not initialized: call IOCContainerHolder.setApplicationContext(...) first.");
        }
        return applicationContext.getTBeanObject(beanName, type);
    }
}
bboss-core/src/org/frameworkset/spi/IOCContainerHolder.java
增加IOCContainerHolder工具类,用于保持当前应用进程根ioc容器对象
bboss-core/src/org/frameworkset/spi/IOCContainerHolder.java
增加IOCContainerHolder工具类,用于保持当前应用进程根ioc容器对象
Java
apache-2.0
error: pathspec 'modules/core/test/com/haulmont/cuba/core/JdkVersionTest.java' did not match any file(s) known to git
3f79172c38ce71c4cec0f8eccfd8e0ddc7c06a8a
1
cuba-platform/cuba,dimone-kun/cuba,dimone-kun/cuba,cuba-platform/cuba,dimone-kun/cuba,cuba-platform/cuba
/*
 * Copyright (c) 2008-2018 Haulmont.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.haulmont.cuba.core;

import org.apache.commons.lang3.SystemUtils;
import org.junit.Test;

/**
 * Smoke test that reports which supported JDK the suite is executing on.
 */
public class JdkVersionTest {

    @Test
    public void test() {
        // The two flags are mutually exclusive, so independent checks behave
        // exactly like the original if/else-if chain.
        if (SystemUtils.IS_JAVA_1_8) {
            System.out.println("---------------test are running on JAVA 8---------------");
        }
        if (SystemUtils.IS_JAVA_10) {
            System.out.println("---------------test are running on JAVA 10---------------");
        }
    }
}
modules/core/test/com/haulmont/cuba/core/JdkVersionTest.java
Support Java 9 and 10 for CUBA #910
modules/core/test/com/haulmont/cuba/core/JdkVersionTest.java
Support Java 9 and 10 for CUBA #910
Java
apache-2.0
error: pathspec 'netcat/src/test/java/com/github/dddpaul/netcat/MainFragmentTest.java' did not match any file(s) known to git
0509c81b3bb6f36ed91c0ae712fa93e01967c4f8
1
dddpaul/android-SimpleNetCat
package com.github.dddpaul.netcat; import android.widget.AutoCompleteTextView; import android.widget.Button; import com.github.dddpaul.netcat.ui.MainFragment; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.RobolectricTestRunner; import org.robolectric.annotation.Config; import static org.robolectric.util.FragmentTestUtil.startFragment; @Config( emulateSdk = 18 ) @RunWith( RobolectricTestRunner.class ) public class MainFragmentTest extends Assert { private MainFragment fragment; @Before public void setUp() { fragment = new MainFragment(); } /** * Test connect button behaviour */ @Test public void testConnectToButton() { startFragment( fragment ); Button connectButton = (Button) fragment.getView().findViewById( R.id.b_connect ); assertNotNull( connectButton ); assertFalse( connectButton.isEnabled() ); AutoCompleteTextView connectToText = (AutoCompleteTextView) fragment.getView().findViewById( R.id.et_connect_to ); connectToText.setText( "some host" ); assertTrue( connectButton.isEnabled() ); } }
netcat/src/test/java/com/github/dddpaul/netcat/MainFragmentTest.java
Create sample fragment test
netcat/src/test/java/com/github/dddpaul/netcat/MainFragmentTest.java
Create sample fragment test
Java
apache-2.0
error: pathspec 'capstone/src/main/java/com/google/sps/servlets/DeleteDataServlet.java' did not match any file(s) known to git
470749a59f753e633247e2d2b666c8e3e38d8378
1
googleinterns/step126-2020,googleinterns/step126-2020,googleinterns/step126-2020,googleinterns/step126-2020,googleinterns/step126-2020
package com.google.sps.servlets;

import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.PreparedQuery;
import com.google.appengine.api.datastore.Query;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Servlet that deletes every "Response" entity from Datastore.
 * (Javadoc fixed: the previous comment claimed it "adds comments".)
 * Intended as a debugging helper.
 */
@WebServlet("/delete-data")
public class DeleteDataServlet extends HttpServlet {

  /**
   * Collects the keys of all "Response" entities, deletes them in a single
   * batch call, then redirects back to the landing page.
   */
  @Override
  public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException {
    DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
    PreparedQuery results = datastore.prepare(new Query("Response"));

    // Batch the keys so one datastore.delete() round trip removes everything.
    List<Key> keysToDelete = new ArrayList<>();
    for (Entity entity : results.asIterable()) {
      keysToDelete.add(entity.getKey());
    }
    datastore.delete(keysToDelete);

    response.sendRedirect("/index.html");
  }
}
capstone/src/main/java/com/google/sps/servlets/DeleteDataServlet.java
Adds servlet to delete entities for debugging purposes
capstone/src/main/java/com/google/sps/servlets/DeleteDataServlet.java
Adds servlet to delete entities for debugging purposes
Java
apache-2.0
error: pathspec 'org.jenetics/src/test/java/org/jenetics/util/XOR32ShiftRandomTest.java' did not match any file(s) known to git
756dc0be3c16c8c4859ef61c5b2fe3ba65628c7a
1
jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics
/*
 * Java Genetic Algorithm Library (@__identifier__@).
 * Copyright (c) @__year__@ Franz Wilhelmstötter
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author:
 *    Franz Wilhelmstötter ([email protected])
 */
package org.jenetics.util;

import org.testng.annotations.DataProvider;

/**
 * Supplies XOR32ShiftRandom instances to the shared {@code RandomTestBase}
 * test suite via TestNG data providers.
 *
 * @author <a href="mailto:[email protected]">Franz Wilhelmstötter</a>
 * @version @__version__@ &mdash; <em>$Date: 2014-01-20 $</em>
 * @since @__version__@
 */
public class XOR32ShiftRandomTest extends RandomTestBase {

	// Pairs of independently constructed generators sharing one seed —
	// presumably compared by the base suite for identical output; one pair per
	// PRNG flavour (plain and thread-safe).
	@Override
	@DataProvider(name = "seededPRNGPair")
	protected Object[][] getSeededPRNGPair() {
		final long seed = math.random.seed();
		return new Object[][]{
			{new XOR32ShiftRandom(seed), new XOR32ShiftRandom(seed)},
			{new XOR32ShiftRandom.ThreadSafe(seed), new XOR32ShiftRandom.ThreadSafe(seed)}
		};
	}

	// Single instances of every PRNG flavour under test: plain, thread-safe,
	// and the thread-local variant (which manages its own seed).
	@Override
	@DataProvider(name = "PRNG")
	protected Object[][] getPRNG() {
		final long seed = math.random.seed();
		return new Object[][]{
			{new XOR32ShiftRandom(seed)},
			{new XOR32ShiftRandom.ThreadSafe(seed)},
			{new XOR32ShiftRandom.ThreadLocal().get()}
		};
	}
}
org.jenetics/src/test/java/org/jenetics/util/XOR32ShiftRandomTest.java
Adding test for 'XOR32ShiftRandom' class.
org.jenetics/src/test/java/org/jenetics/util/XOR32ShiftRandomTest.java
Adding test for 'XOR32ShiftRandom' class.
Java
apache-2.0
error: pathspec 'osmdroid-android/src/org/andnav/osm/views/overlay/ScaleBarOverlay.java' did not match any file(s) known to git
2724450d688f4a06d2be2bad94f0056e3b8074c7
1
microg/android_external_osmdroid,Sarfarazsajjad/osmdroid,beemogmbh/osmdroid,ak-67/osmdroid,Sarfarazsajjad/osmdroid,beemogmbh/osmdroid,DShamaev/osmdroid,sibext/osmdroid-1,osmdroid/osmdroid,1nv4d3r5/osmdroid,osmdroid/osmdroid,DT9/osmdroid,osmdroid/osmdroid,DShamaev/osmdroid,osmdroid/osmdroid,microg/android_external_osmdroid,fpoyer/osmdroid,prembasumatary/osmdroid,dozd/osmdroid,DT9/osmdroid,GeoODK/osmdroid,hyl1987419/osmdroid,1nv4d3r5/osmdroid,ak-67/osmdroid,prembasumatary/osmdroid,GeoODK/osmdroid,fpoyer/osmdroid,dozd/osmdroid,sibext/osmdroid-1,hyl1987419/osmdroid,mozilla/osmdroid
package org.andnav.osm.views.overlay; /** * ScaleBarOverlay.java * * Puts a scale bar in the top-left corner of the screen, offset by a configurable * number of pixels. The bar is scaled to 1-inch length by querying for the physical * DPI of the screen. The size of the bar is printed between the tick marks. A * vertical (longitude) scale can be enabled. Scale is printed in metric (kilometers, * meters), imperial (miles, feet) and nautical (nautical miles, feet). * * Author: Erik Burrows, Griffin Systems LLC * [email protected] * * Change Log: * 2010-10-08: Inclusion to osmdroid trunk * * License: * LGPL version 3 * http://www.gnu.org/licenses/lgpl.html * * Usage: * OpenStreetMapView map = new OpenStreetMapView(...); * ScaleBarOverlay scaleBar = new ScaleBarOverlay(this.getBaseContext(), map); * * scaleBar.setImperial(); // Metric by default * * * map.getOverlays().add(scaleBar); * * To Do List: * 1. Allow for top, bottom, left or right placement. * 2. Scale bar to precise displayed scale text after rounding. 
* */ import org.andnav.osm.DefaultResourceProxyImpl; import org.andnav.osm.ResourceProxy; import org.andnav.osm.util.GeoPoint; import org.andnav.osm.views.OpenStreetMapView; import org.andnav.osm.views.OpenStreetMapView.OpenStreetMapViewProjection; import org.andnav.osm.views.overlay.OpenStreetMapViewOverlay; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Matrix; import android.graphics.Paint; import android.graphics.Picture; import android.graphics.Rect; import android.graphics.Paint.Style; public class ScaleBarOverlay extends OpenStreetMapViewOverlay { // =========================================================== // Fields // =========================================================== // Defaults boolean enabled = true; float xOffset = 10; float yOffset = 10; float lineWidth = 2; int textSize = 12; boolean imperial = false; boolean nautical = false; boolean latitudeBar = true; boolean longitudeBar = false; // Internal protected final OpenStreetMapView mapView; private Context context; protected final Picture scaleBarPicture = new Picture(); private final Matrix scaleBarMatrix = new Matrix(); private int lastZoomLevel = -1; float xdpi; float ydpi; int screenWidth; int screenHeight; // =========================================================== // Constructors // =========================================================== public ScaleBarOverlay(final Context ctx, final OpenStreetMapView mapView) { this(ctx, mapView, new DefaultResourceProxyImpl(ctx)); } public ScaleBarOverlay(final Context ctx, final OpenStreetMapView mapView, final ResourceProxy pResourceProxy) { super(pResourceProxy); this.mapView = mapView; this.context = ctx; xdpi = this.context.getResources().getDisplayMetrics().xdpi; ydpi = this.context.getResources().getDisplayMetrics().ydpi; screenWidth = this.context.getResources().getDisplayMetrics().widthPixels; screenHeight = 
this.context.getResources().getDisplayMetrics().heightPixels; } // =========================================================== // Getter & Setter // =========================================================== public void setScaleBarOffset(float x, float y) { xOffset = x; yOffset = y; } public void setLineWidth(float width) { this.lineWidth = width; } public void setTextSize(int size) { this.textSize = size; } public void setImperial() { this.imperial = true; this.nautical = false; createScaleBarPicture(); } public void setNautical() { this.nautical = true; this.imperial = false; createScaleBarPicture(); } public void setMetric() { this.nautical = false; this.imperial = false; createScaleBarPicture(); } public void setEnabled(boolean enabled) { this.enabled = enabled; } public void drawLatitudeScale(boolean latitude) { this.latitudeBar = latitude; } public void drawLongitudeScale(boolean longitude) { this.longitudeBar = longitude; } // =========================================================== // Methods from SuperClass/Interfaces // =========================================================== @Override protected void onDrawFinished(Canvas c, OpenStreetMapView osmv) {} @Override public void onDraw(final Canvas c, final OpenStreetMapView osmv) { if (this.enabled) { final int zoomLevel = osmv.getZoomLevel(); if (zoomLevel != lastZoomLevel) { lastZoomLevel = zoomLevel; createScaleBarPicture(); } this.scaleBarMatrix.setTranslate(-1 * (scaleBarPicture.getWidth() / 2 - 0.5f), -1 * (scaleBarPicture.getHeight() / 2 - 0.5f)); this.scaleBarMatrix.postTranslate(xdpi/2, ydpi/2 + (c.getHeight() - mapView.getHeight())); c.save(); c.setMatrix(scaleBarMatrix); c.drawPicture(scaleBarPicture); c.restore(); } } // =========================================================== // Methods // =========================================================== public void disableScaleBar() { this.enabled = false; } public boolean enableScaleBar() { return this.enabled = true; } private void 
createScaleBarPicture() { // We want the scale bar to be as long as the closest round-number miles/kilometers // to 1-inch at the latitude at the current center of the screen. OpenStreetMapViewProjection projection = mapView.getProjection(); if (projection == null) { return; } // Two points, 1-inch apart in x/latitude, centered on screen GeoPoint p1 = projection.fromPixels((screenWidth / 2) - (xdpi / 2), screenHeight/2); GeoPoint p2 = projection.fromPixels((screenWidth / 2) + (xdpi / 2), screenHeight/2); int xMetersPerInch = p1.distanceTo(p2); p1 = projection.fromPixels(screenWidth/2, (screenHeight / 2) - (ydpi / 2)); p2 = projection.fromPixels(screenWidth/2, (screenHeight / 2) + (ydpi / 2)); int yMetersPerInch = p1.distanceTo(p2); final Paint barPaint = new Paint(); barPaint.setColor(Color.BLACK); barPaint.setAntiAlias(true); barPaint.setStyle(Style.FILL); barPaint.setAlpha(255); final Paint textPaint = new Paint(); textPaint.setColor(Color.BLACK); textPaint.setAntiAlias(true); textPaint.setStyle(Style.FILL); textPaint.setAlpha(255); textPaint.setTextSize(textSize); final Canvas canvas = scaleBarPicture.beginRecording((int)xdpi, (int)ydpi); if (latitudeBar) { String xMsg = scaleBarLengthText(xMetersPerInch, imperial, nautical); Rect xTextRect = new Rect(); textPaint.getTextBounds(xMsg, 0, xMsg.length(), xTextRect); int textSpacing = (int)(xTextRect.height() / 5.0); canvas.drawRect(xOffset, yOffset, xOffset + xdpi, yOffset + lineWidth, barPaint); canvas.drawRect(xOffset + xdpi, yOffset, xOffset + xdpi + lineWidth, yOffset + xTextRect.height() + lineWidth + textSpacing, barPaint); if (! 
longitudeBar) { canvas.drawRect(xOffset, yOffset, xOffset + lineWidth, yOffset + xTextRect.height() + lineWidth + textSpacing, barPaint); } canvas.drawText(xMsg, xOffset + xdpi/2 - xTextRect.width()/2, yOffset + xTextRect.height() + lineWidth + textSpacing, textPaint); } if (longitudeBar) { String yMsg = scaleBarLengthText(yMetersPerInch, imperial, nautical); Rect yTextRect = new Rect(); textPaint.getTextBounds(yMsg, 0, yMsg.length(), yTextRect); int textSpacing = (int)(yTextRect.height() / 5.0); canvas.drawRect(xOffset, yOffset, xOffset + lineWidth, yOffset + ydpi, barPaint); canvas.drawRect(xOffset, yOffset + ydpi, xOffset + yTextRect.height() + lineWidth + textSpacing, yOffset + ydpi + lineWidth, barPaint); if (! latitudeBar) { canvas.drawRect(xOffset, yOffset, xOffset + yTextRect.height() + lineWidth + textSpacing, yOffset + lineWidth, barPaint); } float x = xOffset + yTextRect.height() + lineWidth + textSpacing; float y = yOffset + ydpi/2 + yTextRect.width()/2; canvas.rotate(-90, x, y); canvas.drawText(yMsg, x, y + textSpacing, textPaint); } scaleBarPicture.endRecording(); } private String scaleBarLengthText(int meters, boolean imperial, boolean nautical) { if (this.imperial) { if (meters >= 1609.344) { return ((int)(meters / 1609.344)) + "mi"; } else if (meters >= 1609.344/10) { return (((int)(meters / 160.9344)) / 10.0) + "mi"; } else { return ((int)(meters * 3.2808399)) + "ft"; } } else if (this.nautical) { if (meters >= 1852) { return ((int)(meters / 1852)) + "nm"; } else if (meters >= 1852/10) { return (((int)(meters / 185.2)) / 10.0) + "nm"; } else { return ((int)(meters * 3.2808399)) + "ft"; } } else { if (meters >= 1000) { return ((int)(meters / 1000)) + "km"; } else if (meters > 100) { return ((int)(meters / 100.0) / 10.0) + "km"; } else { return (int)meters + "m"; } } } }
osmdroid-android/src/org/andnav/osm/views/overlay/ScaleBarOverlay.java
Added ScaleBarOverlay class
osmdroid-android/src/org/andnav/osm/views/overlay/ScaleBarOverlay.java
Added ScaleBarOverlay class
Java
apache-2.0
error: pathspec 'src/main/java/org/javamoney/moneta/format/MonetaryAmountFormatSymbols.java' did not match any file(s) known to git
b5b8c2c90ce18bf52bd4e35fbf8ba7519d73158d
1
msgilligan/jsr354-ri,msgilligan/jsr354-ri
package org.javamoney.moneta.format;

import javax.money.MonetaryAmount;
import javax.money.format.MonetaryAmountFormat;

/**
 * The {@link MonetaryAmountFormat} that uses the {@link MonetaryAmountSymbols}
 * to format a {@link MonetaryAmount}.
 *
 * @author Otavio Santana
 * @see MonetaryAmountSymbols
 * @see MonetaryAmountFormat
 */
public interface MonetaryAmountFormatSymbols extends MonetaryAmountFormat {

    /**
     * Gets the {@link MonetaryAmountSymbols} used in this
     * {@link MonetaryAmountFormatSymbols}.
     *
     * @return the symbols this format instance uses
     */
    MonetaryAmountSymbols getAmountSymbols();
}
src/main/java/org/javamoney/moneta/format/MonetaryAmountFormatSymbols.java
Create the implementation that uses Symbols to parse MonetaryAmount
src/main/java/org/javamoney/moneta/format/MonetaryAmountFormatSymbols.java
Create the implementation that uses Symbols to parse MonetaryAmount
Java
apache-2.0
error: pathspec 'fiscoflex-rest/src/main/java/mx/fiscoflex/rs/persistence/UsuarioEntity.java' did not match any file(s) known to git
60302ac83ad599f656dcdd47aacad026eaf7d631
1
fiscoflex/erp,fiscoflex/erp,fiscoflex/erp,fiscoflex/erp
package mx.fiscoflex.rs.persistence;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;

/**
 * JPA entity mapped to the "Usuarios" table: an application user account with
 * credentials, active flag and a profile reference.
 */
@Entity
@Table(name = "Usuarios")
public class UsuarioEntity {

    // Auto-generated primary key (identity column).
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "IdUsuario")
    private Integer idUsuario;

    // Display name of the user.
    @Column(name = "Nombre")
    private String nombre;

    // E-mail address of the user.
    @Column(name = "Email")
    private String email;

    // Password column as stored in the database.
    // NOTE(review): storage format (hash vs. plain text) is not visible here — confirm.
    @Column(name = "Password")
    private String password;

    // Whether the account is active.
    @Column(name = "Activo")
    private Boolean activo;

    // Profile identifier (plain FK column, not a mapped JPA relation).
    @Column(name = "IdPerfil")
    private Integer idPerfil;

    public Integer getIdUsuario() {
        return idUsuario;
    }

    public void setIdUsuario(Integer idUsuario) {
        this.idUsuario = idUsuario;
    }

    public String getNombre() {
        return nombre;
    }

    public void setNombre(String nombre) {
        this.nombre = nombre;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public Boolean getActivo() {
        return activo;
    }

    public void setActivo(Boolean activo) {
        this.activo = activo;
    }

    public Integer getIdPerfil() {
        return idPerfil;
    }

    public void setIdPerfil(Integer idPerfil) {
        this.idPerfil = idPerfil;
    }
}
fiscoflex-rest/src/main/java/mx/fiscoflex/rs/persistence/UsuarioEntity.java
Clase UsuarioEntity
fiscoflex-rest/src/main/java/mx/fiscoflex/rs/persistence/UsuarioEntity.java
Clase UsuarioEntity
Java
apache-2.0
error: pathspec 'oscarexchange4j/src/main/java/com/caris/oscarexchange4j/theme/Position.java' did not match any file(s) known to git
58cb69897359515b7d2322aba76969d3821b7183
1
caris/OSCAR-js,caris/OSCAR-js,caris/OSCAR-js
/**
 * CARIS oscar - Open Spatial Component ARchitecture
 *
 * Copyright 2014 CARIS <http://www.caris.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.caris.oscarexchange4j.theme;

/**
 * A map position in Oscar, expressed as a latitude/longitude pair together
 * with a zoom level.
 *
 * @author tcoburn
 */
public class Position {

    /** Latitude component of the position. */
    private double latitude;

    /** Longitude component of the position. */
    private double longitude;

    /** Zoom level at this position. */
    private int zoom;

    /** @return the latitude */
    public double getLatitude() {
        return latitude;
    }

    /** @param latitude the latitude to set */
    public void setLatitude(final double latitude) {
        this.latitude = latitude;
    }

    /** @return the longitude */
    public double getLongitude() {
        return longitude;
    }

    /** @param longitude the longitude to set */
    public void setLongitude(final double longitude) {
        this.longitude = longitude;
    }

    /** @return the zoom level */
    public int getZoom() {
        return zoom;
    }

    /** @param zoom the zoom level to set */
    public void setZoom(final int zoom) {
        this.zoom = zoom;
    }
}
oscarexchange4j/src/main/java/com/caris/oscarexchange4j/theme/Position.java
SFD-132 Adding the Position object.
oscarexchange4j/src/main/java/com/caris/oscarexchange4j/theme/Position.java
SFD-132
Java
apache-2.0
error: pathspec 'catalogue/src/main/java/org/project/openbaton/catalogue/nfvo/PluginMessage.java' did not match any file(s) known to git
ba596d29aeb6e9b2baab3b9b0e8ae358051bdb29
1
openbaton/test-plugin,openbaton/test-plugin
package org.project.openbaton.catalogue.nfvo;

import java.io.Serializable;
import java.util.Collection;

/**
 * Transport object describing a remote plugin invocation: the name of the
 * method to call plus its (serializable) arguments.
 *
 * Created by tce on 13.08.15.
 */
public class PluginMessage implements Serializable {

    // Explicit serialVersionUID so the serialized form stays stable across
    // compiler/JVM versions — this object is exchanged between processes.
    private static final long serialVersionUID = 1L;

    // Name of the remote method to invoke.
    private String methodName;

    // Arguments of the invocation; every element must itself be Serializable.
    private Collection<Serializable> parameters;

    public String getMethodName() {
        return methodName;
    }

    public void setMethodName(String methodName) {
        this.methodName = methodName;
    }

    public Collection<Serializable> getParameters() {
        return parameters;
    }

    public void setParameters(Collection<Serializable> parameters) {
        this.parameters = parameters;
    }

    @Override
    public String toString() {
        return "PluginMessage{" +
                "methodName='" + methodName + '\'' +
                ", parameters=" + parameters +
                '}';
    }
}
catalogue/src/main/java/org/project/openbaton/catalogue/nfvo/PluginMessage.java
renamed module exception to common, started to fill common module with common classes/interfaces and started pluginAgent implementation
catalogue/src/main/java/org/project/openbaton/catalogue/nfvo/PluginMessage.java
renamed module exception to common, started to fill common module with common classes/interfaces and started pluginAgent implementation
Java
apache-2.0
error: pathspec 'integration-testing/src/test/java/com/google/net/stubby/stub/StubConfigTest.java' did not match any file(s) known to git
ba653d4bd0f1ed212e8dc09e51ca5ffc4b06b5c8
1
anuraaga/grpc-java,dapengzhang0/grpc-java,jcanizales/grpc-java,SunilKumarAilneni/cmpe273Lab2,sanjayiyerkudaliprasannakumar/cmpe273submission-grpc-Lab2,dongc/grpc-java,louiscryan/grpc-java,winstar/grpc-java,eamonnmcmanus/grpc-java,elandau/grpc-java,mbarve31/CMPE273-Lab2,mingfly/grpc-java,brengarajalu/GrpcAPI,brengarajalu/GrpcAPI,grpc/grpc-java,nickethier/grpc-java,simonhorlick/grpc-java,NamrathaRamalingeGowda/cmpe273-lab2,LuminateWireless/grpc-java,stanley-cheung/grpc-java,elandau/grpc-java,carl-mastrangelo/grpc-java,yangjae/grpc-java,wyxacc/grpc-java,conchlee/grpc-java,rmichela/grpc-java,elandau/grpc-java,brengarajalu/GrpcAPI,vampiregod1996/grpc-java,dongc/grpc-java,dongc/grpc-java,grpc/grpc-java,sanjayiyerkudaliprasannakumar/cmpe273submission-grpc-Lab2,rmichela/grpc-java,fengshao0907/grpc-java,huangsihuan/grpc-java,nagkumar91/cmpe273-lab2-rpc,rmichela/grpc-java,huangsihuan/grpc-java,waykar-prashant/273-lab2,madongfly/grpc-java,elandau/grpc-java,sanjayiyerkudaliprasannakumar/cmpe273submission-grpc-Lab2,pieterjanpintens/grpc-java,nmittler/grpc-java,mbarve31/CMPE273-Lab2,moujian/grpc-java,izharraazi/grpc-Lab2,eamonnmcmanus/grpc-java,wrwg/grpc-java,madongfly/grpc-java,NamrathaRamalingeGowda/cmpe273-lab2,eonezhang/grpc-java,carl-mastrangelo/grpc-java,NamrathaRamalingeGowda/cmpe273-lab2,anupkher4/cmpe273-lab2,ejona86/grpc-java,louiscryan/grpc-java,jawajarsantosh/Lab2,dapengzhang0/grpc-java,anuraaga/grpc-java,aglne/grpc-java,waykar-prashant/273-lab2,brengarajalu/GrpcAPI,grpc/grpc-java,eonezhang/grpc-java,deepakrkole/cmpe273submission,nickethier/grpc-java,louiscryan/grpc-java,jcanizales/grpc-java,conchlee/grpc-java,conchlee/grpc-java,stanley-cheung/grpc-java,joshuabezaleel/grpc-java,zhangkun83/grpc-java,wyxacc/grpc-java,pieterjanpintens/grpc-java,joshuabezaleel/grpc-java,wyxacc/grpc-java,carl-mastrangelo/grpc-java,vampiregod1996/grpc-java,ejona86/grpc-java,fengshao0907/grpc-java,ybv/grpc-java,vampiregod1996/grpc-java,zpencer/grpc-java,moujian/grpc-java,jawajarsantosh/Lab2,
anuraaga/grpc-java,stanley-cheung/grpc-java,eonezhang/grpc-java,sanjayiyerkudaliprasannakumar/cmpe273submission-grpc-Lab2,nagkumar91/cmpe273-lab2-rpc,waykar-prashant/273-lab2,yangjae/grpc-java,dapengzhang0/grpc-java,anupkher4/cmpe273-lab2,zpencer/grpc-java,LuminateWireless/grpc-java,fengshao0907/grpc-java,zpencer/grpc-java,mingfly/grpc-java,carl-mastrangelo/grpc-java,manasidesh2311/cmpe273_lab2,sylvestor88/CMPE273_Lab2,dapengzhang0/grpc-java,huangsihuan/grpc-java,joshuabezaleel/grpc-java,aglne/grpc-java,stanley-cheung/grpc-java,yangjae/grpc-java,mingfly/grpc-java,jawajarsantosh/Lab2,ejona86/grpc-java,nickethier/grpc-java,ejona86/grpc-java,SunilKumarAilneni/cmpe273Lab2,zhangkun83/grpc-java,brengarajalu/GrpcAPI,eamonnmcmanus/grpc-java,deepakrkole/cmpe273submission,nmittler/grpc-java,jcanizales/grpc-java,winstar/grpc-java,deepakrkole/cmpe273submission,LuminateWireless/grpc-java,zpencer/grpc-java,manasidesh2311/cmpe273_lab2,pieterjanpintens/grpc-java,ybv/grpc-java,aglne/grpc-java,zhangkun83/grpc-java,wrwg/grpc-java,simonhorlick/grpc-java,simonhorlick/grpc-java,SunilKumarAilneni/cmpe273Lab2,nagkumar91/cmpe273-lab2-rpc,ybv/grpc-java,zhangkun83/grpc-java,moujian/grpc-java,sylvestor88/CMPE273_Lab2,sylvestor88/CMPE273_Lab2,anupkher4/cmpe273-lab2,mbarve31/CMPE273-Lab2,izharraazi/grpc-Lab2,madongfly/grpc-java,nmittler/grpc-java,grpc/grpc-java,wrwg/grpc-java,sanjayiyerkudaliprasannakumar/cmpe273submission-grpc-Lab2,manasidesh2311/cmpe273_lab2,simonhorlick/grpc-java,izharraazi/grpc-Lab2,pieterjanpintens/grpc-java,rmichela/grpc-java,winstar/grpc-java
package com.google.net.stubby.stub; import static org.junit.Assert.assertEquals; import com.google.net.stubby.Call; import com.google.net.stubby.Channel; import com.google.net.stubby.MethodDescriptor; import com.google.net.stubby.testing.integration.TestServiceGrpc; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import java.util.concurrent.TimeUnit; /** * Tests for stub reconfiguration */ @RunWith(JUnit4.class) public class StubConfigTest { @Test public void testConfigureTimeout() { // Create a default stub TestServiceGrpc.TestServiceBlockingStub stub = TestServiceGrpc.newBlockingStub(new FakeChannel()); assertEquals(TimeUnit.SECONDS.toMicros(1), stub.getServiceDescriptor().fullDuplexCall.getTimeout()); // Reconfigure it stub = stub.configureNewStub() .setTimeout(2, TimeUnit.SECONDS) .build(); // New altered config assertEquals(TimeUnit.SECONDS.toMicros(2), stub.getServiceDescriptor().fullDuplexCall.getTimeout()); // Default config unchanged assertEquals(TimeUnit.SECONDS.toMicros(1), TestServiceGrpc.CONFIG.fullDuplexCall.getTimeout()); } private static class FakeChannel implements Channel { @Override public <ReqT, RespT> Call<ReqT, RespT> newCall(MethodDescriptor<ReqT, RespT> method) { return null; } } }
integration-testing/src/test/java/com/google/net/stubby/stub/StubConfigTest.java
Move StubConfigTest to third_party ------------- Created by MOE: http://code.google.com/p/moe-java MOE_MIGRATED_REVID=79430433
integration-testing/src/test/java/com/google/net/stubby/stub/StubConfigTest.java
Move StubConfigTest to third_party
Java
apache-2.0
error: pathspec 'Algorithms_Java_Implementation/src/com/akh/algorithms/leetcode/easy/lc189/RotateArray.java' did not match any file(s) known to git
db446bf92e7d5c00000f31e53caafa44536e031c
1
akhr/java,akhr/java,akhr/java,akhr/java
/** * @fileName: RotateArray.java * @author: Akhash Ramamurthy * @CreatedOn: Jun 23, 2019 * */ package com.akh.algorithms.leetcode.easy.lc189; import static org.junit.Assert.assertArrayEquals; import java.util.Arrays; import org.junit.Test; /** * @fileName: RotateArray.java * @author: Akhash Ramamurthy * @Created on: Jun 23, 2019 * */ public class RotateArray { // Time: O(N) Space: O(1) public static void rotate_best_1(int[] nums, int k) { int len = nums.length; k = k % len; //If k > len then do % to fit the big number into the available indexes (0 - nums.length) int count = 0; int currentIndx = 0; int currentVal = nums[0]; int nextIndx = 0; while(count < len) { nextIndx = (currentIndx + k) % len; int temp = nums[nextIndx]; nums[nextIndx] = currentVal; currentIndx = nextIndx; currentVal = temp; count++; } } // Time: O(N) Space: O(1) public static void rotate_best_2(int[] nums, int k) { // int len = nums.length; // k = k % len; //If k > len then do % to fit the big number into the available indexes (0 - nums.length) reverse(nums, 0, nums.length-1); reverse(nums, 0, k-1); reverse(nums, k, nums.length-1); } private static void reverse(int[] nums, int start, int end) { while(start < end) { int temp = nums[start]; nums[start] = nums[end]; nums[end] = temp; start++; end--; } } // Time: O(N) Space: O(N) public static void rotate_good(int[] nums, int k) { int len = nums.length; k = k % len; //If k > len then do % to fit the big number into the available indexes (0 - nums.length) int[] res = new int[nums.length]; int i = 0; int j = 0; while(i < len ) { j = (i+k < len) ? 
(i+k) : ((i+k)-len); res[j] = nums[i]; i++; } for (int indx=0; indx<len; indx++) { nums[indx] = res[indx]; } } @Test public void Test_101() { int[] nums = new int[]{1,2,3,4,5,6,7}; rotate_good(nums, 3); System.out.println(Arrays.toString(nums)); assertArrayEquals(new int[]{5,6,7,1,2,3,4}, nums); } @Test public void Test_102() { int[] nums = new int[]{1,2,3,4,5,6,7}; rotate_best_1(nums, 3); System.out.println(Arrays.toString(nums)); assertArrayEquals(new int[]{5,6,7,1,2,3,4}, nums); } @Test public void Test_103() { int[] nums = new int[]{1,2,3,4,5,6,7}; rotate_best_2(nums, 3); System.out.println(Arrays.toString(nums)); assertArrayEquals(new int[]{5,6,7,1,2,3,4}, nums); } @Test public void Test_104() { int[] nums = new int[]{4}; rotate_best_2(nums, 2); System.out.println(Arrays.toString(nums)); assertArrayEquals(new int[]{4}, nums); } }
Algorithms_Java_Implementation/src/com/akh/algorithms/leetcode/easy/lc189/RotateArray.java
Rotate an array by k spots in-place
Algorithms_Java_Implementation/src/com/akh/algorithms/leetcode/easy/lc189/RotateArray.java
Rotate an array by k spots in-place
Java
apache-2.0
error: pathspec 'appsearch/appsearch/src/androidTest/java/androidx/appsearch/app/cts/StorageInfoCtsTest.java' did not match any file(s) known to git
08b6b0a840b85a0fa01a74035b681ee1da1c680f
1
androidx/androidx,androidx/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx,androidx/androidx,AndroidX/androidx,androidx/androidx,AndroidX/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx,AndroidX/androidx,AndroidX/androidx
/* * Copyright 2021 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.appsearch.app.cts; import static com.google.common.truth.Truth.assertThat; import androidx.appsearch.app.StorageInfo; import org.junit.Test; public class StorageInfoCtsTest { @Test public void testBuildStorageInfo() { StorageInfo storageInfo = new StorageInfo.Builder() .setAliveDocumentsCount(10) .setSizeBytes(1L) .setAliveNamespacesCount(10) .build(); assertThat(storageInfo.getAliveDocumentsCount()).isEqualTo(10); assertThat(storageInfo.getSizeBytes()).isEqualTo(1L); assertThat(storageInfo.getAliveNamespacesCount()).isEqualTo(10); } @Test public void testBuildStorageInfo_withDefaults() { StorageInfo storageInfo = new StorageInfo.Builder().build(); assertThat(storageInfo.getAliveDocumentsCount()).isEqualTo(0); assertThat(storageInfo.getSizeBytes()).isEqualTo(0L); assertThat(storageInfo.getAliveNamespacesCount()).isEqualTo(0); } }
appsearch/appsearch/src/androidTest/java/androidx/appsearch/app/cts/StorageInfoCtsTest.java
Add StorageInfo.Builder Cts Tests Bug: 184396708 Test: ./gradlew appsearch:appsearch:connectedCheck --info --daemon Change-Id: I3d8514d290440d35b4866f679a55dfe6acebf50a
appsearch/appsearch/src/androidTest/java/androidx/appsearch/app/cts/StorageInfoCtsTest.java
Add StorageInfo.Builder Cts Tests
Java
apache-2.0
error: pathspec 'core/sis-referencing/src/main/java/org/apache/sis/referencing/datum/DefaultPrimeMeridian.java' did not match any file(s) known to git
6344463966fb00b5e4368f3fc2914b383aafd7b8
1
apache/sis,apache/sis,apache/sis
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sis.referencing.datum; import java.util.Map; import java.util.HashMap; import java.util.Collections; import javax.measure.unit.Unit; import javax.measure.unit.NonSI; import javax.measure.quantity.Angle; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import org.opengis.referencing.datum.PrimeMeridian; import org.apache.sis.referencing.NamedIdentifier; import org.apache.sis.referencing.AbstractIdentifiedObject; import org.apache.sis.metadata.iso.citation.Citations; import org.apache.sis.internal.util.Numerics; import org.apache.sis.io.wkt.Formatter; import org.apache.sis.util.ComparisonMode; import org.apache.sis.util.Immutable; import static org.apache.sis.util.ArgumentChecks.ensureFinite; import static org.apache.sis.util.ArgumentChecks.ensureNonNull; // Related to JDK7 import java.util.Objects; /** * A prime meridian defines the origin from which longitude values are determined. 
* * @author Martin Desruisseaux (IRD, Geomatys) * @author Cédric Briançon (Geomatys) * @since 0.3 (derived from geotk-1.2) * @version 0.3 * @module */ @Immutable @XmlType(name = "PrimeMeridianType") @XmlRootElement(name = "PrimeMeridian") public class DefaultPrimeMeridian extends AbstractIdentifiedObject implements PrimeMeridian { /** * Serial number for inter-operability with different versions. */ private static final long serialVersionUID = 541978454643213305L;; /** * The Greenwich meridian (EPSG:8901), with angular measurements in decimal degrees. */ public static final DefaultPrimeMeridian GREENWICH; static { final Map<String,Object> properties = new HashMap<>(4); properties.put(NAME_KEY, "Greenwich"); // Name is fixed by ISO 19111. properties.put(IDENTIFIERS_KEY, new NamedIdentifier(Citations.EPSG, "8901")); GREENWICH = new DefaultPrimeMeridian(properties, 0, NonSI.DEGREE_ANGLE); } /** * Longitude of the prime meridian measured from the Greenwich meridian, positive eastward. */ @XmlElement(required = true) private final double greenwichLongitude; /** * The angular unit of the {@linkplain #getGreenwichLongitude() Greenwich longitude}. */ private final Unit<Angle> angularUnit; /** * Constructs a new object in which every attributes are set to a default value. * <strong>This is not a valid object.</strong> This constructor is strictly * reserved to JAXB, which will assign values to the fields using reflexion. */ private DefaultPrimeMeridian() { this(GREENWICH); } /** * Constructs a new prime meridian with the same values than the specified one. * This copy constructor provides a way to convert an arbitrary implementation into a SIS one * or a user-defined one (as a subclass), usually in order to leverage some implementation-specific API. * * <p>This constructor performs a shallow copy, i.e. the properties are not cloned.</p> * * @param meridian The prime meridian to copy. 
*/ public DefaultPrimeMeridian(final PrimeMeridian meridian) { super(meridian); greenwichLongitude = meridian.getGreenwichLongitude(); angularUnit = meridian.getAngularUnit(); } /** * Constructs a prime meridian from a name and Greenwich longitude. * The {@code greenwichLongitude} value is assumed in {@linkplain NonSI#DEGREE_ANGLE decimal degrees}. * * @param name The datum name. * @param greenwichLongitude The longitude value relative to the Greenwich Meridian, in degrees. */ public DefaultPrimeMeridian(final String name, final double greenwichLongitude) { this(name, greenwichLongitude, NonSI.DEGREE_ANGLE); } /** * Constructs a prime meridian from a name, Greenwich longitude and angular unit. * * @param name The datum name. * @param greenwichLongitude The longitude value relative to the Greenwich Meridian. * @param angularUnit The angular unit of the longitude, in degrees. */ public DefaultPrimeMeridian(final String name, final double greenwichLongitude, final Unit<Angle> angularUnit) { this(Collections.singletonMap(NAME_KEY, name), greenwichLongitude, angularUnit); } /** * Constructs a prime meridian from a set of properties. The properties map is given * unchanged to the {@linkplain AbstractIdentifiedObject#AbstractIdentifiedObject(Map) * super-class constructor}. * * @param properties Set of properties. Should contains at least {@code "name"}. * @param greenwichLongitude The longitude value relative to the Greenwich Meridian. * @param angularUnit The angular unit of the longitude. */ public DefaultPrimeMeridian(final Map<String,?> properties, final double greenwichLongitude, final Unit<Angle> angularUnit) { super(properties); ensureFinite("greenwichLongitude", greenwichLongitude); ensureNonNull("angularUnit", angularUnit); this.greenwichLongitude = greenwichLongitude; this.angularUnit = angularUnit; } /** * Returns a SIS prime meridian implementation with the same values than the given arbitrary implementation. 
* If the given object is {@code null}, then this method returns {@code null}. * Otherwise if the given object is already a SIS implementation, then the given object is returned unchanged. * Otherwise a new SIS implementation is created and initialized to the attribute values of the given object. * * @param object The object to get as a SIS implementation, or {@code null} if none. * @return A SIS implementation containing the values of the given object (may be the * given object itself), or {@code null} if the argument was null. */ public static DefaultPrimeMeridian castOrCopy(final PrimeMeridian object) { return (object == null) || (object instanceof DefaultPrimeMeridian) ? (DefaultPrimeMeridian) object : new DefaultPrimeMeridian(object); } /** * Longitude of the prime meridian measured from the Greenwich meridian, positive eastward. * * @return The prime meridian Greenwich longitude, in {@linkplain #getAngularUnit() angular unit}. */ @Override public double getGreenwichLongitude() { return greenwichLongitude; } /** * Returns the longitude value relative to the Greenwich Meridian, expressed in the specified units. * This convenience method makes it easier to obtain longitude in decimal degrees using the following * code, regardless of the underlying angular units of this prime meridian: * * {@preformat java * double longitudeInDegrees = primeMeridian.getGreenwichLongitude(NonSI.DEGREE_ANGLE); * } * * @param targetUnit The unit in which to express longitude. * @return The Greenwich longitude in the given units. */ public double getGreenwichLongitude(final Unit<Angle> targetUnit) { return getAngularUnit().getConverterTo(targetUnit).convert(getGreenwichLongitude()); } /** * Returns the angular unit of the Greenwich longitude. * * @return The angular unit of the {@linkplain #getGreenwichLongitude() Greenwich longitude}. */ @Override public Unit<Angle> getAngularUnit() { return angularUnit; } /** * Compares this prime meridian with the specified object for equality. 
* * @param object The object to compare to {@code this}. * @param mode {@link ComparisonMode#STRICT STRICT} for performing a strict comparison, or * {@link ComparisonMode#IGNORE_METADATA IGNORE_METADATA} for comparing only properties * relevant to transformations. * @return {@code true} if both objects are equal. */ @Override public boolean equals(final Object object, final ComparisonMode mode) { if (object == this) { return true; // Slight optimization. } if (super.equals(object, mode)) { switch (mode) { case STRICT: { final DefaultPrimeMeridian that = (DefaultPrimeMeridian) object; return Numerics.equals(this.greenwichLongitude, that.greenwichLongitude) && Objects.equals(this.angularUnit, that.angularUnit); } case BY_CONTRACT: { if (!(object instanceof PrimeMeridian)) break; final PrimeMeridian that = (PrimeMeridian) object; return Numerics.equals(getGreenwichLongitude(), that.getGreenwichLongitude()) && Objects.equals(getAngularUnit(), that.getAngularUnit()); } default: { if (!(object instanceof PrimeMeridian)) break; final DefaultPrimeMeridian that = castOrCopy((PrimeMeridian) object); return Numerics.epsilonEqual(this.getGreenwichLongitude(NonSI.DEGREE_ANGLE), that.getGreenwichLongitude(NonSI.DEGREE_ANGLE), mode); /* * Note: if mode==IGNORE_METADATA, we relax the unit check because EPSG uses * sexagesimal degrees for the Greenwich meridian. Requirying the same * unit prevent Geodetic.isWGS84(...) method to recognize EPSG's WGS84. */ } } } return false; } /** * {@inheritDoc} */ @Override protected int computeHashCode() { return Numerics.hash(greenwichLongitude, super.computeHashCode()); } /** * Formats the inner part of a<cite>Well Known Text</cite> (WKT) element. * * @param formatter The formatter to use. * @return The WKT element name, which is {@code "PRIMEM"}. 
*/ @Override @SuppressWarnings("fallthrough") public String formatTo(final Formatter formatter) { /* * If the PrimeMeridian is written inside a "GEOGCS", then OGC say that it must be * written in the unit of the enclosing geographic coordinate system. Otherwise, * default to decimal degrees. Note that ESRI and GDAL don't follow this rule. */ Unit<Angle> context = formatter.getConvention().getForcedUnit(Angle.class); if (context == null) { context = formatter.getAngularUnit(); if (context == null) { context = NonSI.DEGREE_ANGLE; } } formatter.append(getGreenwichLongitude(context)); return "PRIMEM"; } }
core/sis-referencing/src/main/java/org/apache/sis/referencing/datum/DefaultPrimeMeridian.java
Missed that file in the "Ported DefaultPrimeMeridian" commit. git-svn-id: 38d176a088663cba44dd40f58e77e6472acb7674@1519592 13f79535-47bb-0310-9956-ffa450edef68
core/sis-referencing/src/main/java/org/apache/sis/referencing/datum/DefaultPrimeMeridian.java
Missed that file in the "Ported DefaultPrimeMeridian" commit.
Java
apache-2.0
error: pathspec 'drools-guvnor/src/main/java/org/drools/guvnor/server/security/RoleBasedPermissionManager.java' did not match any file(s) known to git
eb07265b977c45891f65f8c81e84d5d81e51f272
1
kiereleaseuser/guvnor,etirelli/guvnor,yurloc/guvnor,wmedvede/guvnor,etirelli/guvnor,hxf0801/guvnor,kiereleaseuser/guvnor,etirelli/guvnor,hxf0801/guvnor,Rikkola/guvnor,cristianonicolai/guvnor,adrielparedes/guvnor,cristianonicolai/guvnor,nmirasch/guvnor,baldimir/guvnor,psiroky/guvnor,psiroky/guvnor,cristianonicolai/guvnor,porcelli-forks/guvnor,nmirasch/guvnor,yurloc/guvnor,psiroky/guvnor,wmedvede/guvnor,mswiderski/guvnor,Rikkola/guvnor,mbiarnes/guvnor,baldimir/guvnor,adrielparedes/guvnor,porcelli-forks/guvnor,droolsjbpm/guvnor,Rikkola/guvnor,mbiarnes/guvnor,baldimir/guvnor,droolsjbpm/guvnor,porcelli-forks/guvnor,nmirasch/guvnor,mbiarnes/guvnor,hxf0801/guvnor,wmedvede/guvnor,droolsjbpm/guvnor,kiereleaseuser/guvnor,adrielparedes/guvnor
package org.drools.guvnor.server.security; /* * Copyright 2005 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.List; import org.jboss.seam.Component; import org.jboss.seam.ScopeType; import org.jboss.seam.annotations.AutoCreate; import org.jboss.seam.annotations.Create; import org.jboss.seam.annotations.Destroy; import org.jboss.seam.annotations.In; import org.jboss.seam.annotations.Name; import org.jboss.seam.annotations.Scope; import org.jboss.seam.annotations.Unwrap; import org.jboss.seam.contexts.Contexts; import org.jboss.seam.security.Identity; /** * This enhances the BRMS repository for lifecycle management. * @author Chester Woo/Jervis Liu */ @Scope(ScopeType.SESSION) @AutoCreate @Name("roleBasedPermissionManager") public class RoleBasedPermissionManager { // @In List<RoleBasedPermission> permissions; // @Unwrap public List<RoleBasedPermission> getRoleBasedPermission() { return permissions; } @Create public void create() { String userName = ""; if (Contexts.isApplicationContextActive()) { userName = Identity.instance().getCredentials().getUsername(); } RoleBasedPermissionStore pbps = (RoleBasedPermissionStore) Component .getInstance("org.drools.guvnor.server.security.RoleBasedPermissionStore"); permissions = pbps.getRoleBasedPermissionsByUserName(Identity .instance().getCredentials().getUsername()); } @Destroy public void close() { //TO-DO } }
drools-guvnor/src/main/java/org/drools/guvnor/server/security/RoleBasedPermissionManager.java
Missing one class in my previous commit git-svn-id: a243bed356d289ca0d1b6d299a0597bdc4ecaa09@21568 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70
drools-guvnor/src/main/java/org/drools/guvnor/server/security/RoleBasedPermissionManager.java
Missing one class in my previous commit
Java
apache-2.0
error: pathspec 'modules/indexing/src/main/java/org/gridgain/grid/kernal/processors/query/h2/sql/GridSqlQuerySplitter.java' did not match any file(s) known to git
ebd548ca9c5c1cbe0da1c20a6f693406b3026d0c
1
NSAmelchev/ignite,BiryukovVA/ignite,agura/incubator-ignite,SomeFire/ignite,avinogradovgg/ignite,VladimirErshov/ignite,a1vanov/ignite,voipp/ignite,pperalta/ignite,voipp/ignite,apacheignite/ignite,amirakhmedov/ignite,kidaa/incubator-ignite,dlnufox/ignite,kromulan/ignite,adeelmahmood/ignite,chandresh-pancholi/ignite,vadopolski/ignite,leveyj/ignite,dmagda/incubator-ignite,gargvish/ignite,dream-x/ignite,avinogradovgg/ignite,agoncharuk/ignite,shroman/ignite,tkpanther/ignite,adeelmahmood/ignite,f7753/ignite,iveselovskiy/ignite,iveselovskiy/ignite,ryanzz/ignite,psadusumilli/ignite,ilantukh/ignite,dmagda/incubator-ignite,svladykin/ignite,vsisko/incubator-ignite,shroman/ignite,wmz7year/ignite,ryanzz/ignite,ashutakGG/incubator-ignite,vsisko/incubator-ignite,vsisko/incubator-ignite,iveselovskiy/ignite,thuTom/ignite,apacheignite/ignite,nivanov/ignite,rfqu/ignite,dlnufox/ignite,WilliamDo/ignite,agura/incubator-ignite,NSAmelchev/ignite,apache/ignite,amirakhmedov/ignite,xtern/ignite,SharplEr/ignite,kidaa/incubator-ignite,wmz7year/ignite,akuznetsov-gridgain/ignite,svladykin/ignite,agura/incubator-ignite,nivanov/ignite,dmagda/incubator-ignite,xtern/ignite,apacheignite/ignite,ilantukh/ignite,alexzaitzev/ignite,andrey-kuznetsov/ignite,sk0x50/ignite,vladisav/ignite,adeelmahmood/ignite,xtern/ignite,dlnufox/ignite,psadusumilli/ignite,sk0x50/ignite,rfqu/ignite,WilliamDo/ignite,rfqu/ignite,a1vanov/ignite,avinogradovgg/ignite,leveyj/ignite,chandresh-pancholi/ignite,alexzaitzev/ignite,vsuslov/incubator-ignite,gargvish/ignite,alexzaitzev/ignite,pperalta/ignite,louishust/incubator-ignite,arijitt/incubator-ignite,vsisko/incubator-ignite,f7753/ignite,WilliamDo/ignite,murador/ignite,akuznetsov-gridgain/ignite,shurun19851206/ignite,avinogradovgg/ignite,agura/incubator-ignite,rfqu/ignite,adeelmahmood/ignite,zzcclp/ignite,SomeFire/ignite,vladisav/ignite,sk0x50/ignite,xtern/ignite,voipp/ignite,a1vanov/ignite,irudyak/ignite,DoudTechData/ignite,ptupitsyn/ignite,vsuslov/incubator-ignite,vldpyatkov/ignite
,vadopolski/ignite,BiryukovVA/ignite,tkpanther/ignite,shroman/ignite,andrey-kuznetsov/ignite,afinka77/ignite,nizhikov/ignite,mcherkasov/ignite,irudyak/ignite,amirakhmedov/ignite,agura/incubator-ignite,ptupitsyn/ignite,sylentprayer/ignite,samaitra/ignite,avinogradovgg/ignite,ntikhonov/ignite,psadusumilli/ignite,xtern/ignite,psadusumilli/ignite,ascherbakoff/ignite,vadopolski/ignite,vadopolski/ignite,agura/incubator-ignite,DoudTechData/ignite,mcherkasov/ignite,irudyak/ignite,mcherkasov/ignite,a1vanov/ignite,StalkXT/ignite,iveselovskiy/ignite,thuTom/ignite,gridgain/apache-ignite,dlnufox/ignite,kromulan/ignite,SomeFire/ignite,apacheignite/ignite,dream-x/ignite,dmagda/incubator-ignite,endian675/ignite,psadusumilli/ignite,vldpyatkov/ignite,tkpanther/ignite,ptupitsyn/ignite,BiryukovVA/ignite,shurun19851206/ignite,SharplEr/ignite,kidaa/incubator-ignite,agoncharuk/ignite,irudyak/ignite,kromulan/ignite,f7753/ignite,ashutakGG/incubator-ignite,VladimirErshov/ignite,StalkXT/ignite,SharplEr/ignite,abhishek-ch/incubator-ignite,ptupitsyn/ignite,SharplEr/ignite,WilliamDo/ignite,afinka77/ignite,amirakhmedov/ignite,shroman/ignite,daradurvs/ignite,NSAmelchev/ignite,xtern/ignite,leveyj/ignite,arijitt/incubator-ignite,mcherkasov/ignite,zzcclp/ignite,afinka77/ignite,StalkXT/ignite,SomeFire/ignite,endian675/ignite,ashutakGG/incubator-ignite,ilantukh/ignite,daradurvs/ignite,vsisko/incubator-ignite,gargvish/ignite,mcherkasov/ignite,murador/ignite,ilantukh/ignite,sylentprayer/ignite,SharplEr/ignite,irudyak/ignite,ntikhonov/ignite,ilantukh/ignite,nizhikov/ignite,ascherbakoff/ignite,vsuslov/incubator-ignite,vladisav/ignite,shroman/ignite,a1vanov/ignite,arijitt/incubator-ignite,sk0x50/ignite,andrey-kuznetsov/ignite,daradurvs/ignite,a1vanov/ignite,abhishek-ch/incubator-ignite,vldpyatkov/ignite,irudyak/ignite,ilantukh/ignite,andrey-kuznetsov/ignite,leveyj/ignite,BiryukovVA/ignite,vsisko/incubator-ignite,tkpanther/ignite,leveyj/ignite,apache/ignite,ntikhonov/ignite,ascherbakoff/ignite,akuznetsov-gri
dgain/ignite,kromulan/ignite,vladisav/ignite,ryanzz/ignite,louishust/incubator-ignite,shurun19851206/ignite,NSAmelchev/ignite,dmagda/incubator-ignite,ascherbakoff/ignite,andrey-kuznetsov/ignite,ascherbakoff/ignite,alexzaitzev/ignite,sylentprayer/ignite,kromulan/ignite,samaitra/ignite,StalkXT/ignite,gridgain/apache-ignite,akuznetsov-gridgain/ignite,louishust/incubator-ignite,rfqu/ignite,nizhikov/ignite,dream-x/ignite,daradurvs/ignite,f7753/ignite,alexzaitzev/ignite,rfqu/ignite,gridgain/apache-ignite,vadopolski/ignite,andrey-kuznetsov/ignite,vadopolski/ignite,agoncharuk/ignite,ashutakGG/incubator-ignite,arijitt/incubator-ignite,murador/ignite,f7753/ignite,vldpyatkov/ignite,SomeFire/ignite,vsuslov/incubator-ignite,adeelmahmood/ignite,samaitra/ignite,SharplEr/ignite,gargvish/ignite,vsuslov/incubator-ignite,f7753/ignite,vsisko/incubator-ignite,adeelmahmood/ignite,afinka77/ignite,dlnufox/ignite,nivanov/ignite,SomeFire/ignite,thuTom/ignite,dmagda/incubator-ignite,WilliamDo/ignite,pperalta/ignite,agura/incubator-ignite,sylentprayer/ignite,vsuslov/incubator-ignite,tkpanther/ignite,BiryukovVA/ignite,nizhikov/ignite,chandresh-pancholi/ignite,apache/ignite,iveselovskiy/ignite,zzcclp/ignite,WilliamDo/ignite,DoudTechData/ignite,endian675/ignite,ryanzz/ignite,StalkXT/ignite,louishust/incubator-ignite,samaitra/ignite,nizhikov/ignite,daradurvs/ignite,wmz7year/ignite,vldpyatkov/ignite,apache/ignite,ptupitsyn/ignite,daradurvs/ignite,dlnufox/ignite,SharplEr/ignite,chandresh-pancholi/ignite,chandresh-pancholi/ignite,adeelmahmood/ignite,vladisav/ignite,voipp/ignite,thuTom/ignite,agoncharuk/ignite,ryanzz/ignite,samaitra/ignite,gargvish/ignite,VladimirErshov/ignite,SomeFire/ignite,endian675/ignite,shroman/ignite,alexzaitzev/ignite,BiryukovVA/ignite,gargvish/ignite,apache/ignite,agoncharuk/ignite,sk0x50/ignite,pperalta/ignite,WilliamDo/ignite,vladisav/ignite,dream-x/ignite,gridgain/apache-ignite,xtern/ignite,voipp/ignite,SomeFire/ignite,afinka77/ignite,wmz7year/ignite,murador/ignite,avinogr
adovgg/ignite,samaitra/ignite,arijitt/incubator-ignite,shroman/ignite,ntikhonov/ignite,chandresh-pancholi/ignite,svladykin/ignite,amirakhmedov/ignite,ilantukh/ignite,apacheignite/ignite,chandresh-pancholi/ignite,leveyj/ignite,apache/ignite,zzcclp/ignite,ntikhonov/ignite,voipp/ignite,irudyak/ignite,VladimirErshov/ignite,apacheignite/ignite,NSAmelchev/ignite,kidaa/incubator-ignite,wmz7year/ignite,daradurvs/ignite,chandresh-pancholi/ignite,abhishek-ch/incubator-ignite,arijitt/incubator-ignite,dream-x/ignite,sylentprayer/ignite,SharplEr/ignite,SomeFire/ignite,endian675/ignite,apache/ignite,apacheignite/ignite,sylentprayer/ignite,apache/ignite,samaitra/ignite,sylentprayer/ignite,dlnufox/ignite,endian675/ignite,daradurvs/ignite,chandresh-pancholi/ignite,DoudTechData/ignite,ilantukh/ignite,murador/ignite,zzcclp/ignite,ascherbakoff/ignite,vldpyatkov/ignite,kromulan/ignite,dlnufox/ignite,StalkXT/ignite,shurun19851206/ignite,thuTom/ignite,avinogradovgg/ignite,voipp/ignite,nivanov/ignite,thuTom/ignite,ptupitsyn/ignite,ptupitsyn/ignite,psadusumilli/ignite,f7753/ignite,nizhikov/ignite,ryanzz/ignite,nivanov/ignite,amirakhmedov/ignite,gridgain/apache-ignite,abhishek-ch/incubator-ignite,xtern/ignite,dmagda/incubator-ignite,ptupitsyn/ignite,akuznetsov-gridgain/ignite,andrey-kuznetsov/ignite,tkpanther/ignite,shroman/ignite,BiryukovVA/ignite,apache/ignite,andrey-kuznetsov/ignite,shurun19851206/ignite,wmz7year/ignite,vadopolski/ignite,kromulan/ignite,DoudTechData/ignite,louishust/incubator-ignite,wmz7year/ignite,thuTom/ignite,amirakhmedov/ignite,VladimirErshov/ignite,iveselovskiy/ignite,a1vanov/ignite,ntikhonov/ignite,murador/ignite,sk0x50/ignite,ryanzz/ignite,zzcclp/ignite,abhishek-ch/incubator-ignite,xtern/ignite,NSAmelchev/ignite,nizhikov/ignite,DoudTechData/ignite,gargvish/ignite,leveyj/ignite,vadopolski/ignite,vladisav/ignite,gridgain/apache-ignite,VladimirErshov/ignite,VladimirErshov/ignite,pperalta/ignite,shurun19851206/ignite,svladykin/ignite,zzcclp/ignite,adeelmahmood/ignite,w
mz7year/ignite,daradurvs/ignite,murador/ignite,tkpanther/ignite,psadusumilli/ignite,ascherbakoff/ignite,samaitra/ignite,BiryukovVA/ignite,shroman/ignite,DoudTechData/ignite,DoudTechData/ignite,a1vanov/ignite,pperalta/ignite,ptupitsyn/ignite,nivanov/ignite,sk0x50/ignite,irudyak/ignite,gridgain/apache-ignite,kidaa/incubator-ignite,svladykin/ignite,vldpyatkov/ignite,endian675/ignite,dmagda/incubator-ignite,amirakhmedov/ignite,psadusumilli/ignite,voipp/ignite,alexzaitzev/ignite,voipp/ignite,rfqu/ignite,kidaa/incubator-ignite,zzcclp/ignite,vladisav/ignite,ilantukh/ignite,andrey-kuznetsov/ignite,ilantukh/ignite,ryanzz/ignite,pperalta/ignite,nivanov/ignite,mcherkasov/ignite,andrey-kuznetsov/ignite,louishust/incubator-ignite,afinka77/ignite,vldpyatkov/ignite,akuznetsov-gridgain/ignite,alexzaitzev/ignite,dream-x/ignite,rfqu/ignite,nizhikov/ignite,StalkXT/ignite,pperalta/ignite,vsisko/incubator-ignite,ascherbakoff/ignite,WilliamDo/ignite,tkpanther/ignite,kromulan/ignite,BiryukovVA/ignite,agoncharuk/ignite,shurun19851206/ignite,ptupitsyn/ignite,dream-x/ignite,StalkXT/ignite,agoncharuk/ignite,daradurvs/ignite,sk0x50/ignite,leveyj/ignite,thuTom/ignite,nizhikov/ignite,nivanov/ignite,samaitra/ignite,ashutakGG/incubator-ignite,mcherkasov/ignite,afinka77/ignite,abhishek-ch/incubator-ignite,sylentprayer/ignite,ascherbakoff/ignite,NSAmelchev/ignite,ntikhonov/ignite,samaitra/ignite,agura/incubator-ignite,shroman/ignite,ashutakGG/incubator-ignite,shurun19851206/ignite,svladykin/ignite,murador/ignite,mcherkasov/ignite,svladykin/ignite,SomeFire/ignite,ntikhonov/ignite,BiryukovVA/ignite,VladimirErshov/ignite,f7753/ignite,endian675/ignite,StalkXT/ignite,gargvish/ignite,apacheignite/ignite,afinka77/ignite,irudyak/ignite,amirakhmedov/ignite,SharplEr/ignite,sk0x50/ignite,NSAmelchev/ignite,alexzaitzev/ignite,NSAmelchev/ignite,dream-x/ignite,agoncharuk/ignite
/* @java.file.header */ /* _________ _____ __________________ _____ * __ ____/___________(_)______ /__ ____/______ ____(_)_______ * _ / __ __ ___/__ / _ __ / _ / __ _ __ `/__ / __ __ \ * / /_/ / _ / _ / / /_/ / / /_/ / / /_/ / _ / _ / / / * \____/ /_/ /_/ \_,__/ \____/ \__,_/ /_/ /_/ /_/ */ package org.gridgain.grid.kernal.processors.query.h2.sql; import org.gridgain.grid.kernal.processors.cache.query.*; import java.sql.*; import java.util.*; /** * Splits a single SQL query into two step map-reduce query. */ public class GridSqlQuerySplitter { /** * @param conn Connection. * @param query Query. * @param params Parameters. * @return Two step query. */ public GridCacheTwoStepQuery split(Connection conn, String query, Collection<?> params) { GridSqlSelect qry = GridSqlQueryParser.parse(conn, query); // GridSqlSelect rdcQry = qry.clone(); for (GridSqlElement el : qry.select()) { } if (qry.distinct()) { } qry.from(); qry.where(); qry.groups(); qry.having(); qry.sort(); } private boolean checkGroup(GridSqlSelect qry) { if (qry.distinct()) return true; qry.from(); qry.where(); qry.groups(); qry.having(); qry.sort(); } }
modules/indexing/src/main/java/org/gridgain/grid/kernal/processors/query/h2/sql/GridSqlQuerySplitter.java
ignite-gg9499 - splitter
modules/indexing/src/main/java/org/gridgain/grid/kernal/processors/query/h2/sql/GridSqlQuerySplitter.java
ignite-gg9499 - splitter
Java
apache-2.0
error: pathspec 'src/main/java/org/ppwcode/vernacular/persistence_III/junit/hibernate2/AbstractHibernatePersistentBeanTest.java' did not match any file(s) known to git
1a79bc2476e13bc03f68684b025edc14a50d7650
1
peopleware/java-ppwcode-vernacular-persistence,peopleware/java-ppwcode-vernacular-persistence
/*<license> Copyright 2004, PeopleWare n.v. NO RIGHTS ARE GRANTED FOR THE USE OF THIS SOFTWARE, EXCEPT, IN WRITING, TO SELECTED PARTIES. </license>*/ package org.ppwcode.vernacular.persistence_III.junit.hibernate2; import java.io.IOException; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Set; import java.util.concurrent.locks.Condition; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.ppwcode.bean_VI.CompoundPropertyException; import org.ppwcode.bean_VI.PropertyException; import org.ppwcode.vernacular.persistence_III.PersistentBean; import be.peopleware.persistence_II.hibernate.HibernatePagingList; /** * A simple helper class for hibernate actions within jUnit tests. * * @invar getClassUnderTest() != null; * @invar PersistentBean.class.isAssignableFrom(getClassUnderTest()); * @author David Van Keer * @author Peopleware n.v. * @todo (nsmeets) Copied from WoundPilot. */ public abstract class AbstractHibernatePersistentBeanTest extends AbstractHibernate2Test { /*<section name="Meta Information">*/ //------------------------------------------------------------------ /** {@value} */ public static final String CVS_REVISION = "$Revision$"; //$NON-NLS-1$ /** {@value} */ public static final String CVS_DATE = "$Date$"; //$NON-NLS-1$ /** {@value} */ public static final String CVS_STATE = "$State$"; //$NON-NLS-1$ /** {@value} */ public static final String CVS_TAG = "$Name$"; //$NON-NLS-1$ /*</section>*/ private static final Log LOG = LogFactory.getLog(AbstractHibernatePersistentBeanTest.class); /** * Create a new test for the given class. 
* * @param classUnderTest * @pre classUnderTest != null; * @pre PersistentBean.class.isAssignableFrom(classUnderTest); * @post new.getClassUnderTest() == classUnderTest; */ protected AbstractHibernatePersistentBeanTest(final Class classUnderTest) { assert classUnderTest != null; assert PersistentBean.class.isAssignableFrom(classUnderTest); $classUnderTest = classUnderTest; } /*<property name="alarmSymptoms">*/ //------------------------------------------------------------------ /** * Returns the class that is tested. * * @basic */ public final Class getClassUnderTest() { return $classUnderTest; } private Class $classUnderTest; /*</property>*/ /** * Tests all instances of {@link #getClassUnderTest()} in the underlying * storage. * The method {@link #validatePersistentBean(PersistentBean)} is used to test * the persistent beans. * When logging is debug enabled, we only retrieve and test 1 page. */ public void testAlInstances() { LOG.debug("Opening Hibernate session and starting a new transaction."); openSession(); LOG.info("Creating paging set to retrieve instances of " + getClassUnderTest() + " from database in a new session."); ListIterator pages = loadInstancesToTest().listIterator(); if (pages.hasNext()) { LOG.info("Retrieving instances of page " + pages.nextIndex() + " of "+ getClassUnderTest() + " from database."); List pbs = (List)pages.next(); LOG.info("Retrieved " + pbs.size() + " PersistentBeans."); Iterator iter = pbs.iterator(); while (iter.hasNext()) { PersistentBean pb = (PersistentBean)iter.next(); // validatePersistentBean(pb); } } LOG.debug("Closing session"); closeSession(); } /** * Overwrite if you do not wish to test all instances. * Session is open. */ protected HibernatePagingList loadInstancesToTest() { return retrievePages(getClassUnderTest()); } // /** // * Retrieves the class contract corresponding to the class that is tested. 
// * // * @return (ClassContract)Contracts.typeContractInstance(getClassUnderTest()) // * does not throw an exception // * ? result == (ClassContract)Contracts.typeContractInstance(getClassUnderTest()) // * : result == null; // */ // protected final ClassContract getClassContract() { // ClassContract result = null; // try { // result = (ClassContract)Contracts.typeContractInstance(getClassUnderTest()); // } // catch (IOException e) { // assert false : "IOException should not happen: " + e; // } // catch (ClassNotFoundException e) { // assert false : "ClassNotFoundException should not happen: " + e; // } // return result; // } // /** // * Validate the given persistent bean. // * The following validations are executed: // * - the given persistent bean should be effective // * - the invariants are checked // * - some extra validation, using {@link #extraPersistentBeanValidation(PersistentBean)} // */ // protected void validatePersistentBean(final PersistentBean pb) { // if (LOG.isDebugEnabled()) { // LOG.debug("pb: " + ((pb == null) ? "null" : pb.toString())); // } // assertNotNull(pb); // validateTypeInvariants(pb); // boolean civilized = pb.isCivilized(); // /* data in DB must not really be civilized. What we STORE must be, // * but what we get doesn't have to be (as long as type invariants // * are ok. // * But it is something weird: WARN. 
// */ // if (LOG.isWarnEnabled() && (!civilized)) { // CompoundPropertyException cpe = pb.getWildExceptions(); // LOG.warn("Not civilized: " + pb); // Iterator iter1 = cpe.getElementExceptions().values().iterator(); // while (iter1.hasNext()) { // Set peSet = (Set)iter1.next(); // Iterator iter2 = peSet.iterator(); // while (iter2.hasNext()) { // PropertyException pe = (PropertyException)iter2.next(); // LOG.warn(" " + pe.getLocalizedMessage()); // LOG.warn(" originType: " + pe.getOriginType()); // LOG.warn(" origin: " + pe.getOrigin()); // LOG.warn(" propertyName: " + pe.getPropertyName()); // } // } // } // extraPersistentBeanValidation(pb); // } /** * Some extra validation to be performed on the given persistent bean. * Should be overridden by subclasses. */ protected void extraPersistentBeanValidation(final PersistentBean pb) { // NOP } // private void validateTypeInvariants(final Object instance) { // assert instance != null; // LOG.debug("getClassContract(): " + getClassContract()); // Set invars = getClassContract().getTypeInvariantConditions(); // Map context = new HashMap(); // context.put(Condition.SUBJECT_KEY, instance); // Iterator iter = invars.iterator(); // while (iter.hasNext()) { // Condition c = (Condition)iter.next(); // boolean result = c.validate(context); // if (LOG.isErrorEnabled() && (!result)) { // LOG.error("type invariant violation: " + c + " for " + instance); // } // assertTrue(result); // } // } }
src/main/java/org/ppwcode/vernacular/persistence_III/junit/hibernate2/AbstractHibernatePersistentBeanTest.java
Reactivating AbstractHibernate(2)PersistentBeanTest git-svn-id: 380e98d998f12029d9780e105f217f907d976ae8@1424 6057c1f7-a7c9-48c9-a9a6-1240b2ac66dc
src/main/java/org/ppwcode/vernacular/persistence_III/junit/hibernate2/AbstractHibernatePersistentBeanTest.java
Reactivating AbstractHibernate(2)PersistentBeanTest
Java
bsd-3-clause
f56a27bb4b041d84d1a144a21cfe46ed4c2d8324
0
wjkohnen/antlr4,Distrotech/antlr4,sidhart/antlr4,cocosli/antlr4,jvanzyl/antlr4,chandler14362/antlr4,cooperra/antlr4,antlr/antlr4,worsht/antlr4,sidhart/antlr4,mcanthony/antlr4,chienjchienj/antlr4,chandler14362/antlr4,chienjchienj/antlr4,chandler14362/antlr4,ericvergnaud/antlr4,lncosie/antlr4,Pursuit92/antlr4,krzkaczor/antlr4,sidhart/antlr4,ericvergnaud/antlr4,mcanthony/antlr4,joshids/antlr4,mcanthony/antlr4,Distrotech/antlr4,jvanzyl/antlr4,supriyantomaftuh/antlr4,lncosie/antlr4,chienjchienj/antlr4,lncosie/antlr4,antlr/antlr4,antlr/antlr4,ericvergnaud/antlr4,antlr/antlr4,hce/antlr4,parrt/antlr4,joshids/antlr4,krzkaczor/antlr4,Pursuit92/antlr4,Pursuit92/antlr4,wjkohnen/antlr4,wjkohnen/antlr4,chienjchienj/antlr4,Distrotech/antlr4,cocosli/antlr4,jvanzyl/antlr4,parrt/antlr4,joshids/antlr4,ericvergnaud/antlr4,antlr/antlr4,jvanzyl/antlr4,worsht/antlr4,Pursuit92/antlr4,chandler14362/antlr4,hce/antlr4,cocosli/antlr4,krzkaczor/antlr4,cooperra/antlr4,hce/antlr4,ericvergnaud/antlr4,cooperra/antlr4,Distrotech/antlr4,parrt/antlr4,worsht/antlr4,Pursuit92/antlr4,chandler14362/antlr4,joshids/antlr4,wjkohnen/antlr4,ericvergnaud/antlr4,ericvergnaud/antlr4,parrt/antlr4,Pursuit92/antlr4,lncosie/antlr4,joshids/antlr4,mcanthony/antlr4,parrt/antlr4,supriyantomaftuh/antlr4,ericvergnaud/antlr4,krzkaczor/antlr4,ericvergnaud/antlr4,supriyantomaftuh/antlr4,joshids/antlr4,Pursuit92/antlr4,chienjchienj/antlr4,wjkohnen/antlr4,Pursuit92/antlr4,chandler14362/antlr4,joshids/antlr4,Pursuit92/antlr4,supriyantomaftuh/antlr4,ericvergnaud/antlr4,chandler14362/antlr4,parrt/antlr4,parrt/antlr4,cooperra/antlr4,cocosli/antlr4,parrt/antlr4,lncosie/antlr4,Distrotech/antlr4,joshids/antlr4,parrt/antlr4,sidhart/antlr4,chandler14362/antlr4,mcanthony/antlr4,wjkohnen/antlr4,krzkaczor/antlr4,wjkohnen/antlr4,antlr/antlr4,antlr/antlr4,antlr/antlr4,parrt/antlr4,wjkohnen/antlr4,worsht/antlr4,chandler14362/antlr4,antlr/antlr4,hce/antlr4,wjkohnen/antlr4,antlr/antlr4,sidhart/antlr4,supriyantomaftuh/antlr4,worsht/antlr4
/* * [The "BSD license"] * Copyright (c) 2012 Terence Parr * Copyright (c) 2012 Sam Harwell * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.antlr.v4.runtime.tree.gui; import org.abego.treelayout.NodeExtentProvider; import org.abego.treelayout.TreeForTreeLayout; import org.abego.treelayout.TreeLayout; import org.abego.treelayout.util.DefaultConfiguration; import org.antlr.v4.runtime.misc.GraphicsSupport; import org.antlr.v4.runtime.misc.JFileChooserConfirmOverwrite; import org.antlr.v4.runtime.misc.Utils; import org.antlr.v4.runtime.tree.ErrorNode; import org.antlr.v4.runtime.tree.Tree; import org.antlr.v4.runtime.tree.Trees; import javax.imageio.ImageIO; import javax.print.PrintException; import javax.swing.*; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.event.TreeSelectionEvent; import javax.swing.event.TreeSelectionListener; import javax.swing.filechooser.FileFilter; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.TreePath; import javax.swing.tree.TreeSelectionModel; import java.awt.*; import java.awt.event.*; import java.awt.geom.CubicCurve2D; import java.awt.geom.Rectangle2D; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.prefs.Preferences; public class TreeViewer extends JComponent { public static final Color LIGHT_RED = new Color(244, 213, 211); public static class DefaultTreeTextProvider implements TreeTextProvider { private final List<String> ruleNames; public DefaultTreeTextProvider(List<String> ruleNames) { this.ruleNames = ruleNames; } @Override public String getText(Tree node) { return String.valueOf(Trees.getNodeText(node, ruleNames)); } } public static class VariableExtentProvide implements NodeExtentProvider<Tree> { TreeViewer viewer; public VariableExtentProvide(TreeViewer viewer) { this.viewer = viewer; } 
@Override public double getWidth(Tree tree) { FontMetrics fontMetrics = viewer.getFontMetrics(viewer.font); String s = viewer.getText(tree); int w = fontMetrics.stringWidth(s) + viewer.nodeWidthPadding*2; return w; } @Override public double getHeight(Tree tree) { FontMetrics fontMetrics = viewer.getFontMetrics(viewer.font); int h = fontMetrics.getHeight() + viewer.nodeHeightPadding*2; String s = viewer.getText(tree); String[] lines = s.split("\n"); return h * lines.length; } } protected TreeTextProvider treeTextProvider; protected TreeLayout<Tree> treeLayout; protected java.util.List<Tree> highlightedNodes; protected String fontName = "Helvetica"; //Font.SANS_SERIF; protected int fontStyle = Font.PLAIN; protected int fontSize = 11; protected Font font = new Font(fontName, fontStyle, fontSize); protected double gapBetweenLevels = 17; protected double gapBetweenNodes = 7; protected int nodeWidthPadding = 2; // added to left/right protected int nodeHeightPadding = 0; // added above/below protected int arcSize = 0; // make an arc in node outline? 
protected double scale = 1.0; protected Color boxColor = null; // set to a color to make it draw background protected Color highlightedBoxColor = Color.lightGray; protected Color borderColor = null; protected Color textColor = Color.black; public TreeViewer(List<String> ruleNames, Tree tree) { setRuleNames(ruleNames); if ( tree!=null ) { setTree(tree); } setFont(font); } private void updatePreferredSize() { setPreferredSize(getScaledTreeSize()); invalidate(); if (getParent() != null) { getParent().validate(); } repaint(); } // ---------------- PAINT ----------------------------------------------- private boolean useCurvedEdges = false; public boolean getUseCurvedEdges() { return useCurvedEdges; } public void setUseCurvedEdges(boolean useCurvedEdges) { this.useCurvedEdges = useCurvedEdges; } protected void paintEdges(Graphics g, Tree parent) { if (!getTree().isLeaf(parent)) { BasicStroke stroke = new BasicStroke(1.0f, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND); ((Graphics2D)g).setStroke(stroke); Rectangle2D.Double parentBounds = getBoundsOfNode(parent); double x1 = parentBounds.getCenterX(); double y1 = parentBounds.getMaxY(); for (Tree child : getTree().getChildren(parent)) { Rectangle2D.Double childBounds = getBoundsOfNode(child); double x2 = childBounds.getCenterX(); double y2 = childBounds.getMinY(); if (getUseCurvedEdges()) { CubicCurve2D c = new CubicCurve2D.Double(); double ctrlx1 = x1; double ctrly1 = (y1+y2)/2; double ctrlx2 = x2; double ctrly2 = y1; c.setCurve(x1, y1, ctrlx1, ctrly1, ctrlx2, ctrly2, x2, y2); ((Graphics2D) g).draw(c); } else { g.drawLine((int) x1, (int) y1, (int) x2, (int) y2); } paintEdges(g, child); } } } protected void paintBox(Graphics g, Tree tree) { Rectangle2D.Double box = getBoundsOfNode(tree); // draw the box in the background if ( isHighlighted(tree) || boxColor!=null || tree instanceof ErrorNode ) { if ( isHighlighted(tree) ) g.setColor(highlightedBoxColor); else if ( tree instanceof ErrorNode ) g.setColor(LIGHT_RED); else 
g.setColor(boxColor); g.fillRoundRect((int) box.x, (int) box.y, (int) box.width - 1, (int) box.height - 1, arcSize, arcSize); } if ( borderColor!=null ) { g.setColor(borderColor); g.drawRoundRect((int) box.x, (int) box.y, (int) box.width - 1, (int) box.height - 1, arcSize, arcSize); } // draw the text on top of the box (possibly multiple lines) g.setColor(textColor); String s = getText(tree); String[] lines = s.split("\n"); FontMetrics m = getFontMetrics(font); int x = (int) box.x + arcSize / 2 + nodeWidthPadding; int y = (int) box.y + m.getAscent() + m.getLeading() + 1 + nodeHeightPadding; for (int i = 0; i < lines.length; i++) { text(g, lines[i], x, y); y += m.getHeight(); } } public void text(Graphics g, String s, int x, int y) { // System.out.println("drawing '"+s+"' @ "+x+","+y); s = Utils.escapeWhitespace(s, true); g.drawString(s, x, y); } @Override public void paint(Graphics g) { super.paint(g); if ( treeLayout==null ) { return; } Graphics2D g2 = (Graphics2D)g; // anti-alias the lines g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); // Anti-alias the text g2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON); // AffineTransform at = g2.getTransform(); // g2.scale( // (double) this.getWidth() / 400, // (double) this.getHeight() / 400); // // g2.setTransform(at); paintEdges(g, getTree().getRoot()); // paint the boxes for (Tree Tree : treeLayout.getNodeBounds().keySet()) { paintBox(g, Tree); } } @Override protected Graphics getComponentGraphics(Graphics g) { Graphics2D g2d=(Graphics2D)g; g2d.scale(scale, scale); return super.getComponentGraphics(g2d); } // ---------------------------------------------------------------------- private static final String DIALOG_WIDTH_PREFS_KEY = "dialog_width"; private static final String DIALOG_HEIGHT_PREFS_KEY = "dialog_height"; private static final String DIALOG_X_PREFS_KEY = "dialog_x"; private static final String DIALOG_Y_PREFS_KEY = 
"dialog_y"; private static final String DIALOG_DIVIDER_LOC_PREFS_KEY = "dialog_divider_location"; private static final String DIALOG_VIEWER_SCALE_PREFS_KEY = "dialog_slider_location"; protected static JDialog showInDialog(final TreeViewer viewer) { final JDialog dialog = new JDialog(); dialog.setTitle("Parse Tree Inspector"); final Preferences prefs = Preferences.userNodeForPackage(TreeViewer.class); // Make new content panes final Container mainPane = new JPanel(new BorderLayout(5,5)); final Container contentPane = new JPanel(new BorderLayout(0,0)); contentPane.setBackground(Color.white); // Wrap viewer in scroll pane JScrollPane scrollPane = new JScrollPane(viewer); // Make the scrollpane (containing the viewer) the center component contentPane.add(scrollPane, BorderLayout.CENTER); JPanel wrapper = new JPanel(new FlowLayout()); // Add button to bottom JPanel bottomPanel = new JPanel(new BorderLayout(0,0)); contentPane.add(bottomPanel, BorderLayout.SOUTH); JButton ok = new JButton("OK"); ok.addActionListener( new ActionListener() { @Override public void actionPerformed(ActionEvent e) { dialog.dispatchEvent(new WindowEvent(dialog, WindowEvent.WINDOW_CLOSING)); } } ); wrapper.add(ok); // Add an export-to-png button right of the "OK" button JButton png = new JButton("Export as PNG"); png.addActionListener( new ActionListener() { @Override public void actionPerformed(ActionEvent e) { generatePNGFile(viewer, dialog); } } ); wrapper.add(png); bottomPanel.add(wrapper, BorderLayout.SOUTH); // Add scale slider double lastKnownViewerScale = prefs.getDouble(DIALOG_VIEWER_SCALE_PREFS_KEY, viewer.getScale()); viewer.setScale(lastKnownViewerScale); int sliderValue = (int) ((lastKnownViewerScale - 1.0) * 1000); final JSlider scaleSlider = new JSlider(JSlider.HORIZONTAL, -999, 1000, sliderValue); scaleSlider.addChangeListener( new ChangeListener() { @Override public void stateChanged(ChangeEvent e) { int v = scaleSlider.getValue(); viewer.setScale(v / 1000.0 + 1.0); } } ); 
bottomPanel.add(scaleSlider, BorderLayout.CENTER); // Add a JTree representing the parser tree of the input. JPanel treePanel = new JPanel(new BorderLayout(5, 5)); // An "empty" icon that will be used for the JTree's nodes. Icon empty = new EmptyIcon(); UIManager.put("Tree.closedIcon", empty); UIManager.put("Tree.openIcon", empty); UIManager.put("Tree.leafIcon", empty); Tree parseTreeRoot = viewer.getTree().getRoot(); TreeNodeWrapper nodeRoot = new TreeNodeWrapper(parseTreeRoot, viewer); fillTree(nodeRoot, parseTreeRoot, viewer); final JTree tree = new JTree(nodeRoot); tree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION); tree.addTreeSelectionListener(new TreeSelectionListener() { @Override public void valueChanged(TreeSelectionEvent e) { JTree selectedTree = (JTree) e.getSource(); TreePath path = selectedTree.getSelectionPath(); TreeNodeWrapper treeNode = (TreeNodeWrapper) path.getLastPathComponent(); // Set the clicked AST. viewer.setTree((Tree) treeNode.getUserObject()); } }); treePanel.add(new JScrollPane(tree)); // Create the pane for both the JTree and the AST final JSplitPane splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, treePanel, contentPane); mainPane.add(splitPane, BorderLayout.CENTER); dialog.setContentPane(mainPane); // make viz WindowListener exitListener = new WindowAdapter() { public void windowClosing(WindowEvent e) { prefs.putInt(DIALOG_WIDTH_PREFS_KEY, (int) dialog.getSize().getWidth()); prefs.putInt(DIALOG_HEIGHT_PREFS_KEY, (int) dialog.getSize().getHeight()); prefs.putDouble(DIALOG_X_PREFS_KEY, dialog.getLocationOnScreen().getX()); prefs.putDouble(DIALOG_Y_PREFS_KEY, dialog.getLocationOnScreen().getY()); prefs.putInt(DIALOG_DIVIDER_LOC_PREFS_KEY, splitPane.getDividerLocation()); prefs.putDouble(DIALOG_VIEWER_SCALE_PREFS_KEY, viewer.getScale()); dialog.setVisible(false); dialog.dispose(); } }; dialog.addWindowListener(exitListener); dialog.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE); int width = 
prefs.getInt(DIALOG_WIDTH_PREFS_KEY, 600); int height = prefs.getInt(DIALOG_HEIGHT_PREFS_KEY, 500); dialog.setPreferredSize(new Dimension(width, height)); dialog.pack(); // After pack(): set the divider at 1/3 (200/600) of the frame. int dividerLocation = prefs.getInt(DIALOG_DIVIDER_LOC_PREFS_KEY, 200); splitPane.setDividerLocation(dividerLocation); if (prefs.getDouble(DIALOG_X_PREFS_KEY, -1) != -1) { dialog.setLocation( (int)prefs.getDouble(DIALOG_X_PREFS_KEY, 100), (int)prefs.getDouble(DIALOG_Y_PREFS_KEY, 100) ); } else { dialog.setLocationRelativeTo(null); } dialog.setVisible(true); return dialog; } private static void generatePNGFile(TreeViewer viewer, JDialog dialog) { BufferedImage bi = new BufferedImage(viewer.getSize().width, viewer.getSize().height, BufferedImage.TYPE_INT_ARGB); Graphics g = bi.createGraphics(); viewer.paint(g); g.dispose(); try { File suggestedFile = generateNonExistingPngFile(); JFileChooser fileChooser = new JFileChooserConfirmOverwrite(); fileChooser.setCurrentDirectory(suggestedFile.getParentFile()); fileChooser.setSelectedFile(suggestedFile); FileFilter pngFilter = new FileFilter() { @Override public boolean accept(File pathname) { if (pathname.isFile()) { return pathname.getName().toLowerCase().endsWith(".png"); } return true; } @Override public String getDescription() { return "PNG Files (*.png)"; } }; fileChooser.addChoosableFileFilter(pngFilter); fileChooser.setFileFilter(pngFilter); int returnValue = fileChooser.showSaveDialog(dialog); if (returnValue == JFileChooser.APPROVE_OPTION) { File pngFile = fileChooser.getSelectedFile(); ImageIO.write(bi, "png", pngFile); try { // Try to open the parent folder using the OS' native file manager. Desktop.getDesktop().open(pngFile.getParentFile()); } catch (Exception ex) { // We could not launch the file manager: just show a popup that we // succeeded in saving the PNG file. 
JOptionPane.showMessageDialog(dialog, "Saved PNG to: " + pngFile.getAbsolutePath()); ex.printStackTrace(); } } } catch (Exception ex) { JOptionPane.showMessageDialog(dialog, "Could not export to PNG: " + ex.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); ex.printStackTrace(); } } private static File generateNonExistingPngFile() { final String parent = "."; final String name = "antlr4_parse_tree"; final String extension = ".png"; File pngFile = new File(parent, name + extension); int counter = 1; // Keep looping until we create a File that does not yet exist. while (pngFile.exists()) { pngFile = new File(parent, name + "_"+ counter + extension); counter++; } return pngFile; } private static void fillTree(TreeNodeWrapper node, Tree tree, TreeViewer viewer) { if (tree == null) { return; } for (int i = 0; i < tree.getChildCount(); i++) { Tree childTree = tree.getChild(i); TreeNodeWrapper childNode = new TreeNodeWrapper(childTree, viewer); node.add(childNode); fillTree(childNode, childTree, viewer); } } private Dimension getScaledTreeSize() { Dimension scaledTreeSize = treeLayout.getBounds().getBounds().getSize(); scaledTreeSize = new Dimension((int)(scaledTreeSize.width*scale), (int)(scaledTreeSize.height*scale)); return scaledTreeSize; } public Future<JDialog> open() { final TreeViewer viewer = this; viewer.setScale(1.5); Callable<JDialog> callable = new Callable<JDialog>() { JDialog result; @Override public JDialog call() throws Exception { SwingUtilities.invokeAndWait(new Runnable() { @Override public void run() { result = showInDialog(viewer); } }); return result; } }; ExecutorService executor = Executors.newSingleThreadExecutor(); try { return executor.submit(callable); } finally { executor.shutdown(); } } public void save(String fileName) throws IOException, PrintException { JDialog dialog = new JDialog(); Container contentPane = dialog.getContentPane(); ((JComponent) contentPane).setBorder(BorderFactory.createEmptyBorder( 10, 10, 10, 10)); 
contentPane.add(this); contentPane.setBackground(Color.white); dialog.pack(); dialog.setLocationRelativeTo(null); dialog.dispose(); GraphicsSupport.saveImage(this, fileName); } // --------------------------------------------------- protected Rectangle2D.Double getBoundsOfNode(Tree node) { return treeLayout.getNodeBounds().get(node); } protected String getText(Tree tree) { String s = treeTextProvider.getText(tree); s = Utils.escapeWhitespace(s, true); return s; } public TreeTextProvider getTreeTextProvider() { return treeTextProvider; } public void setTreeTextProvider(TreeTextProvider treeTextProvider) { this.treeTextProvider = treeTextProvider; } public void setFontSize(int sz) { fontSize = sz; font = new Font(fontName, fontStyle, fontSize); } public void setFontName(String name) { fontName = name; font = new Font(fontName, fontStyle, fontSize); } /** Slow for big lists of highlighted nodes */ public void addHighlightedNodes(Collection<Tree> nodes) { highlightedNodes = new ArrayList<Tree>(); highlightedNodes.addAll(nodes); } public void removeHighlightedNodes(Collection<Tree> nodes) { if ( highlightedNodes!=null ) { // only remove exact objects defined by ==, not equals() for (Tree t : nodes) { int i = getHighlightedNodeIndex(t); if ( i>=0 ) highlightedNodes.remove(i); } } } protected boolean isHighlighted(Tree node) { return getHighlightedNodeIndex(node) >= 0; } protected int getHighlightedNodeIndex(Tree node) { if ( highlightedNodes==null ) return -1; for (int i = 0; i < highlightedNodes.size(); i++) { Tree t = highlightedNodes.get(i); if ( t == node ) return i; } return -1; } @Override public Font getFont() { return font; } @Override public void setFont(Font font) { this.font = font; } public int getArcSize() { return arcSize; } public void setArcSize(int arcSize) { this.arcSize = arcSize; } public Color getBoxColor() { return boxColor; } public void setBoxColor(Color boxColor) { this.boxColor = boxColor; } public Color getHighlightedBoxColor() { return 
highlightedBoxColor; } public void setHighlightedBoxColor(Color highlightedBoxColor) { this.highlightedBoxColor = highlightedBoxColor; } public Color getBorderColor() { return borderColor; } public void setBorderColor(Color borderColor) { this.borderColor = borderColor; } public Color getTextColor() { return textColor; } public void setTextColor(Color textColor) { this.textColor = textColor; } protected TreeForTreeLayout<Tree> getTree() { return treeLayout.getTree(); } public void setTree(Tree root) { if ( root!=null ) { boolean useIdentity = true; // compare node identity this.treeLayout = new TreeLayout<Tree>(new TreeLayoutAdaptor(root), new TreeViewer.VariableExtentProvide(this), new DefaultConfiguration<Tree>(gapBetweenLevels, gapBetweenNodes), useIdentity); // Let the UI display this new AST. updatePreferredSize(); } else { this.treeLayout = null; repaint(); } } public double getScale() { return scale; } public void setScale(double scale) { if(scale <= 0) { scale = 1; } this.scale = scale; updatePreferredSize(); } public void setRuleNames(List<String> ruleNames) { setTreeTextProvider(new DefaultTreeTextProvider(ruleNames)); } private static class TreeNodeWrapper extends DefaultMutableTreeNode { final TreeViewer viewer; TreeNodeWrapper(Tree tree, TreeViewer viewer) { super(tree); this.viewer = viewer; } @Override public String toString() { return viewer.getText((Tree) this.getUserObject()); } } private static class EmptyIcon implements Icon { @Override public int getIconWidth() { return 0; } @Override public int getIconHeight() { return 0; } @Override public void paintIcon(Component c, Graphics g, int x, int y) { /* Do nothing. */ } } }
runtime/Java/src/org/antlr/v4/runtime/tree/gui/TreeViewer.java
/* * [The "BSD license"] * Copyright (c) 2012 Terence Parr * Copyright (c) 2012 Sam Harwell * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.antlr.v4.runtime.tree.gui; import org.abego.treelayout.NodeExtentProvider; import org.abego.treelayout.TreeForTreeLayout; import org.abego.treelayout.TreeLayout; import org.abego.treelayout.util.DefaultConfiguration; import org.antlr.v4.runtime.misc.GraphicsSupport; import org.antlr.v4.runtime.misc.JFileChooserConfirmOverwrite; import org.antlr.v4.runtime.misc.NotNull; import org.antlr.v4.runtime.misc.Utils; import org.antlr.v4.runtime.tree.ErrorNode; import org.antlr.v4.runtime.tree.Tree; import org.antlr.v4.runtime.tree.Trees; import javax.imageio.ImageIO; import javax.print.PrintException; import javax.swing.*; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.event.TreeSelectionEvent; import javax.swing.event.TreeSelectionListener; import javax.swing.filechooser.FileFilter; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.TreePath; import javax.swing.tree.TreeSelectionModel; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.geom.CubicCurve2D; import java.awt.geom.Rectangle2D; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; public class TreeViewer extends JComponent { public static final Color LIGHT_RED = new Color(244, 213, 211); public static class DefaultTreeTextProvider implements TreeTextProvider { private final List<String> ruleNames; public DefaultTreeTextProvider(List<String> ruleNames) { this.ruleNames = ruleNames; } @Override public String getText(Tree node) { return String.valueOf(Trees.getNodeText(node, ruleNames)); } } public static class VariableExtentProvide implements NodeExtentProvider<Tree> { TreeViewer viewer; public 
VariableExtentProvide(TreeViewer viewer) { this.viewer = viewer; } @Override public double getWidth(Tree tree) { FontMetrics fontMetrics = viewer.getFontMetrics(viewer.font); String s = viewer.getText(tree); int w = fontMetrics.stringWidth(s) + viewer.nodeWidthPadding*2; return w; } @Override public double getHeight(Tree tree) { FontMetrics fontMetrics = viewer.getFontMetrics(viewer.font); int h = fontMetrics.getHeight() + viewer.nodeHeightPadding*2; String s = viewer.getText(tree); String[] lines = s.split("\n"); return h * lines.length; } } protected TreeTextProvider treeTextProvider; protected TreeLayout<Tree> treeLayout; protected java.util.List<Tree> highlightedNodes; protected String fontName = "Helvetica"; //Font.SANS_SERIF; protected int fontStyle = Font.PLAIN; protected int fontSize = 11; protected Font font = new Font(fontName, fontStyle, fontSize); protected double gapBetweenLevels = 17; protected double gapBetweenNodes = 7; protected int nodeWidthPadding = 2; // added to left/right protected int nodeHeightPadding = 0; // added above/below protected int arcSize = 0; // make an arc in node outline? 
protected double scale = 1.0; protected Color boxColor = null; // set to a color to make it draw background protected Color highlightedBoxColor = Color.lightGray; protected Color borderColor = null; protected Color textColor = Color.black; public TreeViewer(List<String> ruleNames, Tree tree) { setRuleNames(ruleNames); if ( tree!=null ) { setTree(tree); } setFont(font); } private void updatePreferredSize() { setPreferredSize(getScaledTreeSize()); invalidate(); if (getParent() != null) { getParent().validate(); } repaint(); } // ---------------- PAINT ----------------------------------------------- private boolean useCurvedEdges = false; public boolean getUseCurvedEdges() { return useCurvedEdges; } public void setUseCurvedEdges(boolean useCurvedEdges) { this.useCurvedEdges = useCurvedEdges; } protected void paintEdges(Graphics g, Tree parent) { if (!getTree().isLeaf(parent)) { BasicStroke stroke = new BasicStroke(1.0f, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND); ((Graphics2D)g).setStroke(stroke); Rectangle2D.Double parentBounds = getBoundsOfNode(parent); double x1 = parentBounds.getCenterX(); double y1 = parentBounds.getMaxY(); for (Tree child : getTree().getChildren(parent)) { Rectangle2D.Double childBounds = getBoundsOfNode(child); double x2 = childBounds.getCenterX(); double y2 = childBounds.getMinY(); if (getUseCurvedEdges()) { CubicCurve2D c = new CubicCurve2D.Double(); double ctrlx1 = x1; double ctrly1 = (y1+y2)/2; double ctrlx2 = x2; double ctrly2 = y1; c.setCurve(x1, y1, ctrlx1, ctrly1, ctrlx2, ctrly2, x2, y2); ((Graphics2D) g).draw(c); } else { g.drawLine((int) x1, (int) y1, (int) x2, (int) y2); } paintEdges(g, child); } } } protected void paintBox(Graphics g, Tree tree) { Rectangle2D.Double box = getBoundsOfNode(tree); // draw the box in the background if ( isHighlighted(tree) || boxColor!=null || tree instanceof ErrorNode ) { if ( isHighlighted(tree) ) g.setColor(highlightedBoxColor); else if ( tree instanceof ErrorNode ) g.setColor(LIGHT_RED); else 
g.setColor(boxColor); g.fillRoundRect((int) box.x, (int) box.y, (int) box.width - 1, (int) box.height - 1, arcSize, arcSize); } if ( borderColor!=null ) { g.setColor(borderColor); g.drawRoundRect((int) box.x, (int) box.y, (int) box.width - 1, (int) box.height - 1, arcSize, arcSize); } // draw the text on top of the box (possibly multiple lines) g.setColor(textColor); String s = getText(tree); String[] lines = s.split("\n"); FontMetrics m = getFontMetrics(font); int x = (int) box.x + arcSize / 2 + nodeWidthPadding; int y = (int) box.y + m.getAscent() + m.getLeading() + 1 + nodeHeightPadding; for (int i = 0; i < lines.length; i++) { text(g, lines[i], x, y); y += m.getHeight(); } } public void text(Graphics g, String s, int x, int y) { // System.out.println("drawing '"+s+"' @ "+x+","+y); s = Utils.escapeWhitespace(s, true); g.drawString(s, x, y); } @Override public void paint(Graphics g) { super.paint(g); if ( treeLayout==null ) { return; } Graphics2D g2 = (Graphics2D)g; // anti-alias the lines g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); // Anti-alias the text g2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON); // AffineTransform at = g2.getTransform(); // g2.scale( // (double) this.getWidth() / 400, // (double) this.getHeight() / 400); // // g2.setTransform(at); paintEdges(g, getTree().getRoot()); // paint the boxes for (Tree Tree : treeLayout.getNodeBounds().keySet()) { paintBox(g, Tree); } } @Override protected Graphics getComponentGraphics(Graphics g) { Graphics2D g2d=(Graphics2D)g; g2d.scale(scale, scale); return super.getComponentGraphics(g2d); } // ---------------------------------------------------------------------- protected static JDialog showInDialog(final TreeViewer viewer) { final JDialog dialog = new JDialog(); dialog.setTitle("Parse Tree Inspector"); // Make new content panes final Container mainPane = new JPanel(new BorderLayout(5,5)); final Container 
contentPane = new JPanel(new BorderLayout(0,0)); contentPane.setBackground(Color.white); // Wrap viewer in scroll pane JScrollPane scrollPane = new JScrollPane(viewer); // Make the scrollpane (containing the viewer) the center component contentPane.add(scrollPane, BorderLayout.CENTER); JPanel wrapper = new JPanel(new FlowLayout()); // Add button to bottom JPanel bottomPanel = new JPanel(new BorderLayout(0,0)); contentPane.add(bottomPanel, BorderLayout.SOUTH); JButton ok = new JButton("OK"); ok.addActionListener( new ActionListener() { @Override public void actionPerformed(ActionEvent e) { dialog.setVisible(false); dialog.dispose(); } } ); wrapper.add(ok); // Add an export-to-png button right of the "OK" button JButton png = new JButton("png"); png.addActionListener( new ActionListener() { @Override public void actionPerformed(ActionEvent e) { generatePNGFile(viewer, dialog); } } ); wrapper.add(png); bottomPanel.add(wrapper, BorderLayout.SOUTH); // Add scale slider int sliderValue = (int) ((viewer.getScale()-1.0) * 1000); final JSlider scaleSlider = new JSlider(JSlider.HORIZONTAL, -999,1000,sliderValue); scaleSlider.addChangeListener( new ChangeListener() { @Override public void stateChanged(ChangeEvent e) { int v = scaleSlider.getValue(); viewer.setScale(v / 1000.0 + 1.0); } } ); bottomPanel.add(scaleSlider, BorderLayout.CENTER); // Add a JTree representing the parser tree of the input. JPanel treePanel = new JPanel(new BorderLayout(5, 5)); // An "empty" icon that will be used for the JTree's nodes. 
Icon empty = new EmptyIcon(); UIManager.put("Tree.closedIcon", empty); UIManager.put("Tree.openIcon", empty); UIManager.put("Tree.leafIcon", empty); Tree parseTreeRoot = viewer.getTree().getRoot(); TreeNodeWrapper nodeRoot = new TreeNodeWrapper(parseTreeRoot, viewer); fillTree(nodeRoot, parseTreeRoot, viewer); final JTree tree = new JTree(nodeRoot); tree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION); tree.addTreeSelectionListener(new TreeSelectionListener() { @Override public void valueChanged(TreeSelectionEvent e) { JTree selectedTree = (JTree) e.getSource(); TreePath path = selectedTree.getSelectionPath(); TreeNodeWrapper treeNode = (TreeNodeWrapper) path.getLastPathComponent(); // Set the clicked AST. viewer.setTree((Tree) treeNode.getUserObject()); } }); treePanel.add(new JScrollPane(tree)); // Create the pane for both the JTree and the AST JSplitPane splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, treePanel, contentPane); mainPane.add(splitPane, BorderLayout.CENTER); dialog.setContentPane(mainPane); // make viz dialog.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE); dialog.setPreferredSize(new Dimension(600, 500)); dialog.pack(); // After pack(): set the divider at 1/3 of the frame. 
splitPane.setDividerLocation(0.33); dialog.setLocationRelativeTo(null); dialog.setVisible(true); return dialog; } private static void generatePNGFile(TreeViewer viewer, JDialog dialog) { BufferedImage bi = new BufferedImage(viewer.getSize().width, viewer.getSize().height, BufferedImage.TYPE_INT_ARGB); Graphics g = bi.createGraphics(); viewer.paint(g); g.dispose(); try { File suggestedFile = generateNonExistingPngFile(); JFileChooser fileChooser = new JFileChooserConfirmOverwrite(); fileChooser.setCurrentDirectory(suggestedFile.getParentFile()); fileChooser.setSelectedFile(suggestedFile); FileFilter pngFilter = new FileFilter() { @Override public boolean accept(File pathname) { if (pathname.isFile()) { return pathname.getName().toLowerCase().endsWith(".png"); } return true; } @Override public String getDescription() { return "PNG Files (*.png)"; } }; fileChooser.addChoosableFileFilter(pngFilter); fileChooser.setFileFilter(pngFilter); int returnValue = fileChooser.showSaveDialog(dialog); if (returnValue == JFileChooser.APPROVE_OPTION) { File pngFile = fileChooser.getSelectedFile(); ImageIO.write(bi, "png", pngFile); try { // Try to open the parent folder using the OS' native file manager. Desktop.getDesktop().open(pngFile.getParentFile()); } catch (Exception ex) { // We could not launch the file manager: just show a popup that we // succeeded in saving the PNG file. JOptionPane.showMessageDialog(dialog, "Saved PNG to: " + pngFile.getAbsolutePath()); ex.printStackTrace(); } } } catch (Exception ex) { JOptionPane.showMessageDialog(dialog, "Could not export to PNG: " + ex.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); ex.printStackTrace(); } } private static File generateNonExistingPngFile() { final String parent = "."; final String name = "antlr4_parse_tree"; final String extension = ".png"; File pngFile = new File(parent, name + extension); int counter = 1; // Keep looping until we create a File that does not yet exist. 
while (pngFile.exists()) { pngFile = new File(parent, name + "_"+ counter + extension); counter++; } return pngFile; } private static void fillTree(TreeNodeWrapper node, Tree tree, TreeViewer viewer) { if (tree == null) { return; } for (int i = 0; i < tree.getChildCount(); i++) { Tree childTree = tree.getChild(i); TreeNodeWrapper childNode = new TreeNodeWrapper(childTree, viewer); node.add(childNode); fillTree(childNode, childTree, viewer); } } private Dimension getScaledTreeSize() { Dimension scaledTreeSize = treeLayout.getBounds().getBounds().getSize(); scaledTreeSize = new Dimension((int)(scaledTreeSize.width*scale), (int)(scaledTreeSize.height*scale)); return scaledTreeSize; } public Future<JDialog> open() { final TreeViewer viewer = this; viewer.setScale(1.5); Callable<JDialog> callable = new Callable<JDialog>() { JDialog result; @Override public JDialog call() throws Exception { SwingUtilities.invokeAndWait(new Runnable() { @Override public void run() { result = showInDialog(viewer); } }); return result; } }; ExecutorService executor = Executors.newSingleThreadExecutor(); try { return executor.submit(callable); } finally { executor.shutdown(); } } public void save(String fileName) throws IOException, PrintException { JDialog dialog = new JDialog(); Container contentPane = dialog.getContentPane(); ((JComponent) contentPane).setBorder(BorderFactory.createEmptyBorder( 10, 10, 10, 10)); contentPane.add(this); contentPane.setBackground(Color.white); dialog.pack(); dialog.setLocationRelativeTo(null); dialog.dispose(); GraphicsSupport.saveImage(this, fileName); } // --------------------------------------------------- protected Rectangle2D.Double getBoundsOfNode(Tree node) { return treeLayout.getNodeBounds().get(node); } protected String getText(Tree tree) { String s = treeTextProvider.getText(tree); s = Utils.escapeWhitespace(s, true); return s; } public TreeTextProvider getTreeTextProvider() { return treeTextProvider; } public void 
setTreeTextProvider(TreeTextProvider treeTextProvider) { this.treeTextProvider = treeTextProvider; } public void setFontSize(int sz) { fontSize = sz; font = new Font(fontName, fontStyle, fontSize); } public void setFontName(String name) { fontName = name; font = new Font(fontName, fontStyle, fontSize); } /** Slow for big lists of highlighted nodes */ public void addHighlightedNodes(Collection<Tree> nodes) { highlightedNodes = new ArrayList<Tree>(); highlightedNodes.addAll(nodes); } public void removeHighlightedNodes(Collection<Tree> nodes) { if ( highlightedNodes!=null ) { // only remove exact objects defined by ==, not equals() for (Tree t : nodes) { int i = getHighlightedNodeIndex(t); if ( i>=0 ) highlightedNodes.remove(i); } } } protected boolean isHighlighted(Tree node) { return getHighlightedNodeIndex(node) >= 0; } protected int getHighlightedNodeIndex(Tree node) { if ( highlightedNodes==null ) return -1; for (int i = 0; i < highlightedNodes.size(); i++) { Tree t = highlightedNodes.get(i); if ( t == node ) return i; } return -1; } @Override public Font getFont() { return font; } @Override public void setFont(Font font) { this.font = font; } public int getArcSize() { return arcSize; } public void setArcSize(int arcSize) { this.arcSize = arcSize; } public Color getBoxColor() { return boxColor; } public void setBoxColor(Color boxColor) { this.boxColor = boxColor; } public Color getHighlightedBoxColor() { return highlightedBoxColor; } public void setHighlightedBoxColor(Color highlightedBoxColor) { this.highlightedBoxColor = highlightedBoxColor; } public Color getBorderColor() { return borderColor; } public void setBorderColor(Color borderColor) { this.borderColor = borderColor; } public Color getTextColor() { return textColor; } public void setTextColor(Color textColor) { this.textColor = textColor; } protected TreeForTreeLayout<Tree> getTree() { return treeLayout.getTree(); } public void setTree(Tree root) { if ( root!=null ) { boolean useIdentity = true; // 
compare node identity this.treeLayout = new TreeLayout<Tree>(new TreeLayoutAdaptor(root), new TreeViewer.VariableExtentProvide(this), new DefaultConfiguration<Tree>(gapBetweenLevels, gapBetweenNodes), useIdentity); // Let the UI display this new AST. updatePreferredSize(); } else { this.treeLayout = null; repaint(); } } public double getScale() { return scale; } public void setScale(double scale) { if(scale <= 0) { scale = 1; } this.scale = scale; updatePreferredSize(); } public void setRuleNames(List<String> ruleNames) { setTreeTextProvider(new DefaultTreeTextProvider(ruleNames)); } private static class TreeNodeWrapper extends DefaultMutableTreeNode { final TreeViewer viewer; TreeNodeWrapper(Tree tree, TreeViewer viewer) { super(tree); this.viewer = viewer; } @Override public String toString() { return viewer.getText((Tree) this.getUserObject()); } } private static class EmptyIcon implements Icon { @Override public int getIconWidth() { return 0; } @Override public int getIconHeight() { return 0; } @Override public void paintIcon(Component c, Graphics g, int x, int y) { /* Do nothing. */ } } }
makes the UI dialog to save/restore its state - width, height, location on screen, the placement of splitter as well as viewer scale
runtime/Java/src/org/antlr/v4/runtime/tree/gui/TreeViewer.java
makes the UI dialog to save/restore its state - width, height, location on screen, the placement of splitter as well as viewer scale
Java
bsd-3-clause
b5e7fbbae3ea6913034047a4c18a2a36323d952b
0
lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon
/* * $Id: PollManager.java,v 1.244 2012-07-23 17:26:32 barry409 Exp $ */ /* Copyright (c) 2000-2012 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. 
*/ package org.lockss.poller; import static org.lockss.util.Constants.SECOND; import static org.lockss.util.Constants.MINUTE; import static org.lockss.util.Constants.HOUR; import static org.lockss.util.Constants.DAY; import static org.lockss.util.Constants.WEEK; import java.io.*; import java.util.*; import EDU.oswego.cs.dl.util.concurrent.*; import org.apache.commons.collections.map.*; import org.apache.commons.lang.builder.CompareToBuilder; import org.apache.commons.lang.mutable.MutableInt; import org.lockss.alert.*; import org.lockss.app.*; import org.lockss.config.*; import org.lockss.daemon.*; import org.lockss.daemon.status.StatusService; import org.lockss.hasher.HashService; import org.lockss.plugin.*; import org.lockss.poller.v3.*; import org.lockss.poller.v3.V3Serializer.PollSerializerException; import org.lockss.protocol.*; import org.lockss.protocol.psm.*; import org.lockss.protocol.V3LcapMessage.PollNak; import org.lockss.state.*; import org.lockss.util.*; import static org.lockss.poller.v3.V3Poller.*; import static org.lockss.poller.v3.V3Voter.*; import static org.lockss.poller.v3.V3PollFactory.*; /** * <p>Class that manages the polling process.</p> * @author Claire Griffin * @version 1.0 */ // CR: Code review comments are marked with CR: public class PollManager extends BaseLockssDaemonManager implements ConfigurableManager { protected static Logger theLog = Logger.getLogger("PollManager"); static final String PREFIX = Configuration.PREFIX + "poll."; static final String PARAM_RECENT_EXPIRATION = PREFIX + "expireRecent"; static final long DEFAULT_RECENT_EXPIRATION = DAY; /** If true, empty poll state directories found at startup will be * deleted. */ static final String PARAM_DELETE_INVALID_POLL_STATE_DIRS = PREFIX + "deleteInvalidPollStateDirs"; static final boolean DEFAULT_DELETE_INVALID_POLL_STATE_DIRS = true; /** If true, discard saved poll state at startup (i.e., don't restore * polls that were running before exit). 
*/ static final String PARAM_DISCARD_SAVED_POLLS = PREFIX + "discardSavedPolls"; static final boolean DEFAULT_DISCARD_SAVED_POLLS = false; public static final String PARAM_ENABLE_V3_POLLER = org.lockss.poller.v3.V3PollFactory.PARAM_ENABLE_V3_POLLER; public static final boolean DEFAULT_ENABLE_V3_POLLER = org.lockss.poller.v3.V3PollFactory.DEFAULT_ENABLE_V3_POLLER; public static final String PARAM_ENABLE_V3_VOTER = org.lockss.poller.v3.V3PollFactory.PARAM_ENABLE_V3_VOTER; public static final boolean DEFAULT_ENABLE_V3_VOTER = org.lockss.poller.v3.V3PollFactory.DEFAULT_ENABLE_V3_VOTER; /** The classes of AUs for which polls should be run. May be a singleton * or list of: * <dl> * <dt>All<dd> All AUs * <dt>Internal<dd> Internal AUs (plugin registries) * <dt>Priority<dd> Poll that have been requested from DebugPanel * </dl> */ public static final String PARAM_AUTO_POLL_AUS = PREFIX + "autoPollAuClassess"; public static final List<String> DEFAULT_AUTO_POLL_AUS = ListUtil.list("All"); // Poll starter public static final String PARAM_START_POLLS_INITIAL_DELAY = PREFIX + "pollStarterInitialDelay"; public static final long DEFAULT_START_POLLS_INITIAL_DELAY = MINUTE * 10; /** Minimum interval between poll attempts on an AU. This takes effect * even if the poll failed to start. */ public static final String PARAM_MIN_POLL_ATTEMPT_INTERVAL = PREFIX + "minPollAttemptInterval"; public static final long DEFAULT_MIN_POLL_ATTEMPT_INTERVAL = 4 * HOUR; /** The time, in ms, that will be added between launching new polls. * This time is added to the channel timeout time provided by SCOMM. 
*/ public static final String PARAM_ADDED_POLL_DELAY = PREFIX + "pollStarterAdditionalDelayBetweenPolls"; public static final long DEFAULT_ADDED_POLL_DELAY = SECOND; /** Max interval between recalculating poll queue order */ public static final String PARAM_REBUILD_POLL_QUEUE_INTERVAL = PREFIX + "queueRecalcInterval"; static final long DEFAULT_REBUILD_POLL_QUEUE_INTERVAL = HOUR; /** Interval to sleep when queue empty, before recalc. */ public static final String PARAM_QUEUE_EMPTY_SLEEP = PREFIX + "queueEmptySleep"; static final long DEFAULT_QUEUE_EMPTY_SLEEP = 30 * MINUTE; /** Interval to sleep when max number of pollers are active, before * checking again. */ public static final String PARAM_MAX_POLLERS_SLEEP = PREFIX + "maxPollersSleep"; static final long DEFAULT_MAX_POLLERS_SLEEP = 10 * MINUTE; /** Size of poll queue. */ public static final String PARAM_POLL_QUEUE_MAX = PREFIX + "pollQueueMax"; static final int DEFAULT_POLL_QUEUE_MAX = 20; /** * If set, poll starting will be throttled. This is the default. 
*/ public static final String PARAM_ENABLE_POLL_STARTER_THROTTLE = PREFIX + "enablePollStarterThrottle"; public static boolean DEFAULT_ENABLE_POLL_STARTER_THROTTLE = true; /** If true, state machines are run in their own thread */ public static final String PARAM_PSM_ASYNCH = PREFIX + "psmAsynch"; public static final boolean DEFAULT_PSM_ASYNCH = true; /** Interval after which we'll try inviting peers that we think are not * in our polling group */ public static final String PARAM_WRONG_GROUP_RETRY_TIME = PREFIX + "wrongGroupRetryTime"; public static final long DEFAULT_WRONG_GROUP_RETRY_TIME = 4 * WEEK; static final String V3PREFIX = PREFIX + "v3."; /** Curve expressing desired inter-poll interval based on last agreement * value */ public static final String PARAM_POLL_INTERVAL_AGREEMENT_CURVE = V3PREFIX + "pollIntervalAgreementCurve"; public static final String DEFAULT_POLL_INTERVAL_AGREEMENT_CURVE = null; /** Previous poll results for which we want to apply {@link * #PARAM_POLL_INTERVAL_AGREEMENT_CURVE} */ public static final String PARAM_POLL_INTERVAL_AGREEMENT_LAST_RESULT = V3PREFIX + "pollIntervalAgreementLastResult"; public static final List DEFAULT_POLL_INTERVAL_AGREEMENT_LAST_RESULT = Collections.EMPTY_LIST; /** Curve expressing desired inter-poll interval based on number of * at-risk instances of AU */ public static final String PARAM_POLL_INTERVAL_AT_RISK_PEERS_CURVE = V3PREFIX + "pollIntervalAtRiskPeersCurve"; public static final String DEFAULT_POLL_INTERVAL_AT_RISK_PEERS_CURVE = null; /** Curve expressing poll weight multiplier based on number of at-risk * instances of AU */ public static final String PARAM_POLL_WEIGHT_AT_RISK_PEERS_CURVE = V3PREFIX + "pollWeightAtRiskPeersCurve"; public static final String DEFAULT_POLL_WEIGHT_AT_RISK_PEERS_CURVE = null; /** Curve giving reset interval of NoAuPeerIdSet as a function of AU * age */ public static final String PARAM_NO_AU_RESET_INTERVAL_CURVE = V3PREFIX + "noAuResetIntervalCurve"; public static final String 
DEFAULT_NO_AU_RESET_INTERVAL_CURVE = "[1w,2d],[1w,7d],[30d,7d],[30d,30d],[100d,30d],[100d,50d]"; /** Target poll interval if no other mechanism is used */ public static final String PARAM_TOPLEVEL_POLL_INTERVAL = V3PREFIX + "toplevelPollInterval"; public static final long DEFAULT_TOPLEVEL_POLL_INTERVAL = 10 * WEEK; public static class AuPeersMap extends HashMap<String,Set<PeerIdentity>> {} public static class Peer2PeerMap extends HashMap<PeerIdentity,PeerIdentity> {} private static class EntryManager { /** * A collection of running polls. */ private HashMap<String,PollManagerEntry> thePolls = new HashMap<String,PollManagerEntry>(); // FixedTimedMap is not generic. /** * A collection of finished polls, removed from the collection * when they reach a certain age. */ private FixedTimedMap theRecentPolls = new FixedTimedMap(DEFAULT_RECENT_EXPIRATION); // Items are moved between thePolls and theRecentPolls, so it's simplest // to synchronize all accesses on a single object, pollMapLock. private Object pollMapLock = thePolls; void setExpireInterval(long recentPollExpireTime) { synchronized (pollMapLock) { theRecentPolls.setInterval(recentPollExpireTime); } } /** Return the PollManagerEntry for the poll with the specified * key. */ PollManagerEntry getPollManagerEntry(String key) { synchronized (pollMapLock) { return thePolls.get(key); } } void addPoll(BasePoll poll) { synchronized (pollMapLock) { thePolls.put(poll.getKey(), new PollManagerEntry(poll)); } } PollManagerEntry removePoll(String key) { synchronized (pollMapLock) { return thePolls.remove(key); } } void addRecentPoll(String key, PollManagerEntry pme) { synchronized (pollMapLock) { theRecentPolls.put(key, pme); } } void clear() { synchronized (pollMapLock) { thePolls.clear(); theRecentPolls.clear(); } } /** * @return a the set of PollManagerEntries which are running, are * concerned with this au, and which have not yet completed. 
*/ Set<PollManagerEntry> toCancel(ArchivalUnit au) { Set<PollManagerEntry> toCancel = new HashSet<PollManagerEntry>(); synchronized (pollMapLock) { for (PollManagerEntry pme : thePolls.values()) { ArchivalUnit pau = pme.poll.getCachedUrlSet().getArchivalUnit(); if (pau == au && !pme.isPollCompleted()) { toCancel.add(pme); } } } return toCancel; } /** * Is a poll of the given type and spec currently running * @param spec the PollSpec definining the location of the poll. * @return true if we have a poll which is running that matches pollspec * * @deprecated This method may be removed in a future release. */ boolean isPollRunning(PollSpec spec) { synchronized (pollMapLock) { for (PollManagerEntry pme : thePolls.values()) { if (pme.isSamePoll(spec)) { return !pme.isPollCompleted(); } } } return false; } public boolean isPollRunning(ArchivalUnit au) { if (au == null || au.getAuId() == null) { throw new NullPointerException("Passed a null AU or AU with null ID " + "to isPollRunning!"); } synchronized (pollMapLock) { for (PollManagerEntry pme : thePolls.values()) { if (au.getAuId().equals(pme.getPollSpec().getAuId())) { // Keep looking until we find a V3Poller that is active, or // we run out of poll objects to examine. If we find an active // poller, return right away. 
if (pme.getPoll() instanceof V3Poller) { if (pme.isPollActive()) { return true; } } } } } return false; } /** Find the poll either in current or recent polls */ PollManagerEntry getCurrentOrRecentV3PollEntry(String key) { synchronized (pollMapLock) { PollManagerEntry pme = thePolls.get(key); if (pme == null) { pme = (PollManagerEntry)theRecentPolls.get(key); } if (pme != null && !pme.isV3Poll()) { throw new IllegalStateException("Expected V3Poll: "+key); } return pme; } } Collection<V3Poller> getActiveV3Pollers() { Collection<V3Poller> polls = new ArrayList<V3Poller>(); synchronized (pollMapLock) { for (PollManagerEntry pme : thePolls.values()) { if (pme.isV3Poll() && pme.getPoll() instanceof V3Poller) { polls.add((V3Poller)pme.getPoll()); } } } return polls; } Collection<V3Poller> getRecentV3Pollers() { Collection<V3Poller> polls = new ArrayList<V3Poller>(); synchronized (pollMapLock) { for (Iterator it = theRecentPolls.values().iterator(); it.hasNext(); ) { PollManagerEntry pme = (PollManagerEntry)it.next(); if (pme.isV3Poll() && pme.getPoll() instanceof V3Poller) { polls.add((V3Poller)pme.getPoll()); } } } return polls; } Collection<V3Voter> getActiveV3Voters() { Collection<V3Voter> polls = new ArrayList<V3Voter>(); synchronized (pollMapLock) { for (PollManagerEntry pme : thePolls.values()) { if (pme.isV3Poll() && pme.getPoll() instanceof V3Voter) { polls.add((V3Voter)pme.getPoll()); } } } return polls; } Collection<V3Voter> getRecentV3Voters() { Collection<V3Voter> polls = new ArrayList<V3Voter>(); synchronized (pollMapLock) { for (Iterator it = theRecentPolls.values().iterator(); it.hasNext(); ) { PollManagerEntry pme = (PollManagerEntry)it.next(); if (pme.isV3Poll() && pme.getPoll() instanceof V3Voter) { polls.add((V3Voter)pme.getPoll()); } } } return polls; } // Reached only by TestPollManager boolean isPollClosed(String key) { PollManagerEntry pme; synchronized (pollMapLock) { pme = (PollManagerEntry)theRecentPolls.get(key); } return (pme != null) ? 
pme.isPollCompleted() : false; } // Used only in V1PollFactory. public boolean hasPoll(String key) { synchronized (pollMapLock) { return thePolls.containsKey(key); } } // Only used in V1PollFactory. /** * getActivePollSpecIterator returns an Iterator over the set of * PollSpec instances which currently have active polls on the given au. * @return Iterator over set of PollSpec */ Iterator<PollSpec> getActivePollSpecIterator(ArchivalUnit au, BasePoll dontIncludePoll) { Set<PollSpec> pollspecs = new HashSet<PollSpec>(); synchronized (pollMapLock) { for (PollManagerEntry pme : thePolls.values()) { ArchivalUnit pau = pme.poll.getCachedUrlSet().getArchivalUnit(); if (pau == au && pme.poll != dontIncludePoll && !pme.isPollCompleted()) { pollspecs.add(pme.poll.getPollSpec()); } } } return (pollspecs.iterator()); } // Used in PollerStatus.getSummary, which is V1 code. /** Find the poll either in current or recent polls */ PollManagerEntry getCurrentOrRecentV1PollEntry(String key) { synchronized (pollMapLock) { PollManagerEntry pme = thePolls.get(key); if (pme == null) { pme = (PollManagerEntry)theRecentPolls.get(key); } if (pme != null && pme.isV3Poll()) { throw new IllegalStateException("Expected V1Poll: "+key); } return pme; } } //--------------- PollerStatus Accessors ----------------------------- Collection<PollManagerEntry> getV1Polls() { Collection<PollManagerEntry> polls = new ArrayList<PollManagerEntry>(); synchronized (pollMapLock) { for (PollManagerEntry pme : thePolls.values()) { if (pme.getType() == Poll.V1_CONTENT_POLL || pme.getType() == Poll.V1_NAME_POLL || pme.getType() == Poll.V1_VERIFY_POLL) { polls.add(pme); } } for (Iterator it = theRecentPolls.values().iterator(); it.hasNext(); ) { PollManagerEntry pme = (PollManagerEntry)it.next(); if (pme.getType() == Poll.V1_CONTENT_POLL || pme.getType() == Poll.V1_NAME_POLL || pme.getType() == Poll.V1_VERIFY_POLL) { polls.add(pme); } } } return polls; } /** * suspend a poll while we wait for a repair * @param key 
the identifier key of the poll to suspend */ // XXX: V3 -- Only required for V1 polls. PollManagerEntry suspendPoll(String key) { PollManagerEntry pme; synchronized (pollMapLock) { pme = getCurrentOrRecentV1PollEntry(key); if (pme != null) { theRecentPolls.remove(key); thePolls.put(key, pme); pme.setPollSuspended(); } } return pme; } } private static EntryManager entryManager = new EntryManager(); private static PollManager theManager = null; private static LcapRouter.MessageHandler m_msgHandler; private static IdentityManager theIDManager; private static HashService theHashService; private static LcapRouter theRouter = null; private AlertManager theAlertManager = null; private PluginManager pluginMgr = null; private static SystemMetrics theSystemMetrics = null; private AuEventHandler auEventHandler; // CR: serializedPollers and serializedVoters s.b. updated as new // polls/votes are created, in case AU is deactivated & reactivated private HashMap serializedPollers; private HashMap serializedVoters; private V3PollStatusAccessor v3Status; private boolean deleteInvalidPollStateDirs = DEFAULT_DELETE_INVALID_POLL_STATE_DIRS; private long paramToplevelPollInterval = DEFAULT_TOPLEVEL_POLL_INTERVAL; private long pollStartInitialDelay = DEFAULT_START_POLLS_INITIAL_DELAY; private boolean enableV3Poller = DEFAULT_ENABLE_V3_POLLER; private int maxSimultaneousPollers = DEFAULT_MAX_SIMULTANEOUS_V3_POLLERS; private PollStarter pollStarter; private boolean isPollStarterEnabled = false; private boolean enablePollStarterThrottle = DEFAULT_ENABLE_POLL_STARTER_THROTTLE; private long paramRebuildPollQueueInterval = DEFAULT_REBUILD_POLL_QUEUE_INTERVAL; private long paramQueueEmptySleep = DEFAULT_QUEUE_EMPTY_SLEEP; private long paramMaxPollersSleep = DEFAULT_MAX_POLLERS_SLEEP; private int paramPollQueueMax = DEFAULT_POLL_QUEUE_MAX; private long interPollStartDelay = DEFAULT_ADDED_POLL_DELAY; private long paramMinPollAttemptInterval = DEFAULT_MIN_POLL_ATTEMPT_INTERVAL; private double 
paramMinPercentForRepair = V3Voter.DEFAULT_MIN_PERCENT_AGREEMENT_FOR_REPAIRS; private boolean paramDiscardSavedPolls = DEFAULT_DISCARD_SAVED_POLLS; private boolean isAsynch = DEFAULT_PSM_ASYNCH; private long wrongGroupRetryTime = DEFAULT_WRONG_GROUP_RETRY_TIME; private IpFilter noInvitationSubnetFilter = null; private CompoundLinearSlope v3InvitationWeightAgeCurve = null; // private CompoundLinearSlope v3InvitationWeightSafetyCurve = null; private CompoundLinearSlope v3AcceptProbabilitySafetyCurve = null; private CompoundLinearSlope v3NominationWeightAgeCurve = null; private CompoundLinearSlope pollIntervalAgreementCurve = null; private CompoundLinearSlope pollIntervalAtRiskPeersCurve = null; private CompoundLinearSlope pollWeightAtRiskPeersCurve = null; private Set pollIntervalAgreementLastResult = SetUtil.theSet(DEFAULT_POLL_INTERVAL_AGREEMENT_LAST_RESULT); private long paramWillingRepairerLiveness = DEFAULT_WILLING_REPAIRER_LIVENESS; private double paramAcceptRepairersPollPercent = DEFAULT_ACCEPT_REPAIRERS_POLL_PERCENT; private double paramInvitationWeightAtRisk = DEFAULT_INVITATION_WEIGHT_AT_RISK; private double paramInvitationWeightAlreadyRepairable = DEFAULT_INVITATION_WEIGHT_ALREADY_REPAIRABLE; private CompoundLinearSlope v3NoAuResetIntervalCurve = null; private CompoundLinearSlope v3VoteRetryIntervalDurationCurve = null; private Peer2PeerMap reputationTransferMap; private AuPeersMap atRiskAuInstances = null; // If true, restore V3 Pollers private boolean enablePollers = DEFAULT_ENABLE_V3_POLLER; // If true, restore V3 Voters private boolean enableVoters = DEFAULT_ENABLE_V3_VOTER; private List<String> autoPollAuClassess = DEFAULT_AUTO_POLL_AUS; // Executor used to carry out serialized poll operations. // Implementations include a queued poll executor and a pooled poll executor. 
  private PollRunner theTaskRunner;

  // our configuration variables
  protected long m_recentPollExpireTime = DEFAULT_RECENT_EXPIRATION;

  // Deadline controlling startOnePoll() pacing; initially expired.
  Deadline startOneWait = Deadline.in(0);

  /**
   * A request to poll an ArchivalUnit, optionally with an explicit
   * priority and PollSpec.  Priority > 0 marks a high-priority request.
   */
  static class PollReq {
    ArchivalUnit au;
    int priority = 0;
    PollSpec spec;

    public PollReq(ArchivalUnit au) {
      this.au = au;
    }

    // Fluent setters; return this for chaining.
    public PollReq setPriority(int val) {
      priority = val;
      return this;
    }

    public PollReq setPollSpec(PollSpec spec) {
      this.spec = spec;
      return this;
    }

    public ArchivalUnit getAu() {
      return au;
    }

    public int getPriority() {
      return priority;
    }

    public boolean isHighPriority() {
      return priority > 0;
    }

    public String toString() {
      return "[PollReq: " + au + ", pri: " + priority + "]";
    }
  }

  /**
   * Keep an ordered list of PollReq.  High-priority requests will
   * always be done first, followed by requests generated by tne
   * PollManager itself.
   */
  class PollQueue {
    // When expired, pollQueue must be rebuilt before use.
    Deadline timeToRebuildPollQueue = Deadline.in(0);
    Object queueLock = new Object();	// lock for pollQueue

    /**
     * The processed list of poll requests, in the order they will be executed.
     * Guarded by queueLock.
     */
    private List<PollReq> pollQueue = new ArrayList<PollReq>();
    /**
     * The high-priority pending requests.
     * Synchronized map; iteration must additionally hold the map's monitor.
     */
    private Map<ArchivalUnit,PollReq> highPriorityPollRequests =
      Collections.synchronizedMap(new ListOrderedMap());

    /**
     * Make a PollQueue.
     */
    PollQueue() {
    }

    /**
     * @return the next PollReq to start.
     */
    public PollReq nextReq() throws InterruptedException {
      boolean rebuilt = rebuildPollQueueIfNeeded();
      PollReq req = nextReqFromBuiltQueue();
      if (req != null) {
	return req;
      }
      // Queue was drained; force a rebuild unless we just did one.
      if (!rebuilt) {
	rebuildPollQueue();
      }
      return nextReqFromBuiltQueue();
    }

    /**
     * Request a poll, as specified by the PollReq, sooner than might
     * otherwise happen.  Overrides any previous requests.
     * @param req the PollReq
     */
    public void enqueueHighPriorityPoll(PollReq req) {
      highPriorityPollRequests.put(req.au, req);
      needRebuildPollQueue();
    }

    /**
     * Remove any requests previously registered using
     * enqueueHighPriorityPoll.
     * @param au the ArchivalUnit
     */
    public void cancelAuPolls(ArchivalUnit au) {
      // todo(bhayes): This doesn't force a rebuild? Why not?
      highPriorityPollRequests.remove(au);
    }

    /**
     * Invalidate the current list of pending polls.
     */
    public void needRebuildPollQueue() {
      timeToRebuildPollQueue.expire();
    }

    /**
     * @return a List of ArchivalUnits in the queue to poll.
     */
    public List<ArchivalUnit> getPendingQueueAus() {
      rebuildPollQueueIfNeeded();
      ArrayList<ArchivalUnit> aus = new ArrayList<ArchivalUnit>();
      synchronized (queueLock) {
	for (PollReq req : pollQueue) {
	  aus.add(req.getAu());
	}
      }
      return aus;
    }

    /**
     * Pop the next PollReq from the queue, and return it.
     * Skips (and discards) requests whose AU is no longer active.
     */
    private PollReq nextReqFromBuiltQueue() {
      synchronized (queueLock) {
	if (theLog.isDebug3()) {
	  theLog.debug3("nextReqFromBuiltQueue(), " +
			pollQueue.size() + " in queue");
	}
	while (!pollQueue.isEmpty()) {
	  PollReq req = pollQueue.remove(0);
	  // todo(bhayes): Why is this check here, rather than in
	  // startOnePoll()? Also, if this is in the high priority
	  // list, what keeps it from getting back in the rebuilt
	  // list?
	  // ignore deleted AUs
	  if (pluginMgr.isActiveAu(req.getAu())) {
	    return req;
	  }
	}
	if (theLog.isDebug3()) {
	  theLog.debug3("nextReqFromBuiltQueue(): null");
	}
	return null;
      }
    }

    /**
     * Rebuild the poll queue, if needed.
     * @return true iff the queue was rebuilt.
     */
    private boolean rebuildPollQueueIfNeeded() {
      synchronized (queueLock) {
	if (timeToRebuildPollQueue.expired()) {
	  rebuildPollQueue();
	  return true;
	}
	return false;
      }
    }

    // package level for testing; todo(bhayes): The tests should
    // probably be calling needRebuildPollQueue and using the other
    // public methods.
    /**
     * Force poll queue to be rebuilt from the pending high-priority
     * requests and the PollManager's list of ArchivalUnits.
     */
    void rebuildPollQueue() {
      timeToRebuildPollQueue.expireIn(paramRebuildPollQueueInterval);
      long startTime = TimeBase.nowMs();
      rebuildPollQueue0();
      theLog.debug("rebuildPollQueue(): "+
		   (TimeBase.nowMs() - startTime)+"ms");
    }

    /**
     * REALLY force poll queue to be rebuilt from the pending
     * high-priority requests and the PollManager's list of
     * ArchivalUnits.  High-priority requests go first (in creation
     * order); remaining slots up to paramPollQueueMax are filled by
     * weighted random selection over all eligible AUs.
     */
    private void rebuildPollQueue0() {
      synchronized (queueLock) {
	pollQueue.clear();
	// XXX Until have real priority system, just add these in the
	// order they were created.
	Set<ArchivalUnit> highPriorityAus = new HashSet<ArchivalUnit>();
	synchronized (highPriorityPollRequests) {
	  for (PollReq req : highPriorityPollRequests.values()) {
	    highPriorityAus.add(req.au);
	    if (isEligibleForPoll(req)) {
	      pollQueue.add(req);
	    }
	  }
	}
	int availablePollCount = paramPollQueueMax - pollQueue.size();
	if (availablePollCount > 0) {
	  Map<ArchivalUnit, Double> weightMap =
	    new HashMap<ArchivalUnit, Double>();
	  for (ArchivalUnit au : pluginMgr.getAllAus()) {
	    try {
	      if (highPriorityAus.contains(au)) {
		// already tried above; might or might not have been added.
		continue;
	      }
	      try {
		double weight = pollWeight(au);
		if (weight > 0.0) {
		  weightMap.put(au, weight);
		}
	      } catch (NotEligibleException e) {
		if (theLog.isDebug3()) {
		  theLog.debug3("Not eligible for poll: " + au);
		}
	      }
	    } catch (RuntimeException e) {
	      theLog.warning("Checking for pollworthiness: " + au.getName(), e);
	      // ignore AU if it caused an error
	    }
	  }
	  // weightedRandomSelection throws if the count is larger
	  // than the size.
	  int count = Math.min(weightMap.size(), availablePollCount);
	  if (!weightMap.isEmpty()) {
	    List<ArchivalUnit> selected =
	      weightedRandomSelection(weightMap, count);
	    for (ArchivalUnit au : selected) {
	      PollSpec spec =
		new PollSpec(au.getAuCachedUrlSet(), Poll.V3_POLL);
	      PollReq req = new PollReq(au).setPollSpec(spec);
	      pollQueue.add(req);
	    }
	  }
	}
	if (theLog.isDebug()) {
	  theLog.debug("Poll queue: " + pollQueue);
	}
      }
    }
  }

  /**
   * The poll queue for ordering poll requests.
   */
  protected PollQueue pollQueue = new PollQueue();

  // The PollFactory instances, indexed by protocol version.
  // Slot 0 and 2 are unused (V2 was never deployed).
  PollFactory [] pf = {
    null,
    new V1PollFactory(),
    null, // new V2PollFactory(),
    new V3PollFactory(this),
  };

  public PollManager() {
  }

  /**
   * start the poll manager.  Wires up the services this manager depends
   * on, registers message handling, status tables and AU event hooks,
   * then starts the PollStarter thread.
   * @see org.lockss.app.LockssManager#startService()
   */
  public void startService() {
    super.startService();
    // Create a poll runner.
    theTaskRunner = new PollRunner();
    // the services we use on an ongoing basis
    LockssDaemon theDaemon = getDaemon();
    theIDManager = theDaemon.getIdentityManager();
    theHashService = theDaemon.getHashService();
    theAlertManager = theDaemon.getAlertManager();
    pluginMgr = theDaemon.getPluginManager();

    Configuration config = ConfigManager.getCurrentConfig();
    if (config.containsKey(PARAM_AT_RISK_AU_INSTANCES)) {
      atRiskAuInstances =
        makeAuPeersMap(config.getList(PARAM_AT_RISK_AU_INSTANCES),
                       theIDManager);
    }

    if (config.containsKey(PARAM_REPUTATION_TRANSFER_MAP)) {
      reputationTransferMap =
        makeReputationPeerMap(config.getList(PARAM_REPUTATION_TRANSFER_MAP),
                              theIDManager);
    }

    // register a message handler with the router
    theRouter = theDaemon.getRouterManager();
    m_msgHandler =  new RouterMessageHandler();
    theRouter.registerMessageHandler(m_msgHandler);

    // get System Metrics
    theSystemMetrics = theDaemon.getSystemMetrics();

    // register our status (one accessor per V3 status table)
    StatusService statusServ = theDaemon.getStatusService();
    statusServ.registerStatusAccessor(V3PollStatus.POLLER_STATUS_TABLE_NAME,
				      new V3PollStatus.V3PollerStatus(this));
    statusServ.registerOverviewAccessor(V3PollStatus.POLLER_STATUS_TABLE_NAME,
					new V3PollStatus.PollOverview(this));
    statusServ.registerStatusAccessor(V3PollStatus.VOTER_STATUS_TABLE_NAME,
				      new V3PollStatus.V3VoterStatus(this));
    statusServ.registerOverviewAccessor(V3PollStatus.VOTER_STATUS_TABLE_NAME,
					new V3PollStatus.VoterOverview(this));
    statusServ.registerStatusAccessor(V3PollStatus.POLLER_DETAIL_TABLE_NAME,
				      new V3PollStatus.V3PollerStatusDetail(this));
    statusServ.registerStatusAccessor(V3PollStatus.VOTER_DETAIL_TABLE_NAME,
				      new V3PollStatus.V3VoterStatusDetail(this));
    statusServ.registerStatusAccessor(V3PollStatus.ACTIVE_REPAIRS_TABLE_NAME,
				      new V3PollStatus.V3ActiveRepairs(this));
    statusServ.registerStatusAccessor(V3PollStatus.COMPLETED_REPAIRS_TABLE_NAME,
				      new V3PollStatus.V3CompletedRepairs(this));
    statusServ.registerStatusAccessor(V3PollStatus.NO_QUORUM_TABLE_NAME,
				      new V3PollStatus.V3NoQuorumURLs(this));
    statusServ.registerStatusAccessor(V3PollStatus.TOO_CLOSE_TABLE_NAME,
				      new V3PollStatus.V3TooCloseURLs(this));
    statusServ.registerStatusAccessor(V3PollStatus.AGREE_TABLE_NAME,
				      new V3PollStatus.V3AgreeURLs(this));
    statusServ.registerStatusAccessor(V3PollStatus.DISAGREE_TABLE_NAME,
				      new V3PollStatus.V3DisagreeURLs(this));
    statusServ.registerStatusAccessor(V3PollStatus.ERROR_TABLE_NAME,
				      new V3PollStatus.V3ErrorURLs(this));

    // register our AU event handler
    auEventHandler = new AuEventHandler.Base() {
	@Override public void auCreated(PluginManager.AuEvent event,
					ArchivalUnit au) {
	  restoreAuPolls(au);
	}
	@Override public void auDeleted(PluginManager.AuEvent event,
					ArchivalUnit au) {
	  cancelAuPolls(au);
	}};
    pluginMgr.registerAuEventHandler(auEventHandler);

    // Maintain the state of V3 polls, since these do not use the V1 per-node
    // history mechanism.
    v3Status = new V3PollStatusAccessor();
    // One time load of an in-memory map of AU IDs to directories. 
    preloadStoredPolls();
    // Enable the poll starter.
    enablePollStarter();
  }

  // Starts the PollStarter thread, first stopping any existing one.
  private void enablePollStarter() {
    theLog.info("Starting PollStarter");
    if (pollStarter != null) {
      theLog.debug("PollStarter already running. " +
		   "Stopping old one first");
      disablePollStarter();
    }
    pollStarter = new PollStarter(getDaemon(), this);
    new Thread(pollStarter).start();
    isPollStarterEnabled = true;
  }

  // Stops the PollStarter thread (waits up to one second for exit).
  private void disablePollStarter() {
    if (pollStarter != null) {
      theLog.info("Stopping PollStarter");
      pollStarter.stopPollStarter();
      pollStarter.waitExited(Deadline.in(SECOND));
      pollStarter = null;
    }
    isPollStarterEnabled = false;
  }

  /**
   * stop the poll manager.  Reverses startService(): stops the starter,
   * unregisters handlers and status accessors, stops the task runner and
   * drops references to services.
   * @see org.lockss.app.LockssManager#stopService()
   */
  public void stopService() {
    disablePollStarter();
    if (auEventHandler != null) {
      getDaemon().getPluginManager().unregisterAuEventHandler(auEventHandler);
      auEventHandler = null;
    }
    // unregister our status
    StatusService statusServ = getDaemon().getStatusService();
    statusServ.unregisterStatusAccessor(V3PollStatus.POLLER_STATUS_TABLE_NAME);
    statusServ.unregisterOverviewAccessor(V3PollStatus.POLLER_STATUS_TABLE_NAME);
    statusServ.unregisterStatusAccessor(V3PollStatus.VOTER_STATUS_TABLE_NAME);
    statusServ.unregisterOverviewAccessor(V3PollStatus.VOTER_STATUS_TABLE_NAME);
    statusServ.unregisterStatusAccessor(V3PollStatus.POLLER_DETAIL_TABLE_NAME);
    statusServ.unregisterStatusAccessor(V3PollStatus.VOTER_DETAIL_TABLE_NAME);
    statusServ.unregisterStatusAccessor(V3PollStatus.ACTIVE_REPAIRS_TABLE_NAME);
    statusServ.unregisterStatusAccessor(V3PollStatus.COMPLETED_REPAIRS_TABLE_NAME);
    statusServ.unregisterStatusAccessor(V3PollStatus.NO_QUORUM_TABLE_NAME);
    statusServ.unregisterStatusAccessor(V3PollStatus.TOO_CLOSE_TABLE_NAME);
    statusServ.unregisterStatusAccessor(V3PollStatus.AGREE_TABLE_NAME);
    statusServ.unregisterStatusAccessor(V3PollStatus.DISAGREE_TABLE_NAME);
    statusServ.unregisterStatusAccessor(V3PollStatus.ERROR_TABLE_NAME);
    // unregister our router
    theRouter.unregisterMessageHandler(m_msgHandler);

    // Stop the poll runner.
    if (theTaskRunner != null) {
      theTaskRunner.stop();
    }

    // null anything which might cause problems
    theTaskRunner = null;
    theIDManager = null;
    theHashService = null;
    theSystemMetrics = null;
    entryManager.clear();
    v3Status.clear();
    super.stopService();
  }

  /** Cancel all polls on the specified AU.
   * @param au the AU
   */
  void cancelAuPolls(ArchivalUnit au) {
    // first remove from queues
    pollQueue.cancelAuPolls(au);

    // collect polls to cancel
    Set<PollManagerEntry> toCancel = entryManager.toCancel(au);

    // then actually cancel them
    // Note that the poll may have completed since we collected the Set.
    for (PollManagerEntry pme : toCancel) {
      ArchivalUnit pau = pme.poll.getCachedUrlSet().getArchivalUnit();
      theHashService.cancelAuHashes(pau);
      pme.poll.abortPoll();
    }
  }

  /**
   * Call a poll.  Used by NodeManagerImpl; V1 only.
   * @param pollspec the <code>PollSpec</code> that defines the subject of
   *                 the <code>Poll</code>.
   * @return the poll, if it was successfuly called, else null.
   * @throws IllegalArgumentException if the spec is not protocol version 1
   */
  public Poll callPoll(PollSpec pollspec) {
    if (pollspec.getProtocolVersion() != 1) {
      throw new IllegalArgumentException("V1 method called with: "+pollspec);
    }
    return callPoll0(pollspec);
  }
  

  /**
   * Call a poll.  Used by PollStarter.
   * @param pollspec the <code>PollSpec</code> that defines the subject of
   *                 the <code>Poll</code>.
   * @param au the ArchivalUnit to record the poll attempt against
   * @return the poll, if it was successfuly called, else null.
*/ public Poll callPoll(ArchivalUnit au, PollSpec pollspec) { AuState auState = AuUtil.getAuState(au); auState.pollAttempted(); return callPoll0(pollspec); } private Poll callPoll0(PollSpec pollspec) { String errMsg = null; PollFactory pollFact = getPollFactory(pollspec); if (pollFact != null) { long duration = pollFact.calcDuration(pollspec, this); if (duration > 0) { try { PeerIdentity orig = theIDManager.getLocalPeerIdentity(pollspec.getProtocolVersion()); BasePoll thePoll = makePoller(pollspec, duration, orig); if (thePoll != null) { return thePoll; } else { theLog.debug("makePoller(" + pollspec + ") returned null"); } } catch (ProtocolException ex) { theLog.debug("Error in makePoller or callPoll", ex); } } else { errMsg = "Too busy"; theLog.debug("No duration within limit"); } } else { errMsg = "Unknown poll version: " + pollspec.getProtocolVersion(); } theLog.debug("Poll not started: " + errMsg + ", au: " + pollspec.getAuId()); return null; } /** * Is a poll of the given type and spec currently running * @param spec the PollSpec definining the location of the poll. * @return true if we have a poll which is running that matches pollspec * * @deprecated This method may be removed in a future release. */ public boolean isPollRunning(PollSpec spec) { return entryManager.isPollRunning(spec); } public boolean isPollRunning(ArchivalUnit au) { return entryManager.isPollRunning(au); } /** Return the PollManagerEntry for the poll with the specified key. */ private PollManagerEntry getPollManagerEntry(String key) { return entryManager.getPollManagerEntry(key); } // Used in PollerStatus.getSummary, which is V1 code. 
  /** Find the poll either in current or recent polls */
  PollManagerEntry getCurrentOrRecentV1PollEntry(String key) {
    return entryManager.getCurrentOrRecentV1PollEntry(key);
  }

  /** Find the poll either in current or recent polls */
  private PollManagerEntry getCurrentOrRecentV3PollEntry(String key) {
    return entryManager.getCurrentOrRecentV3PollEntry(key);
  }

  // XXX: V3 -- Only required for V1 polls.
  // Detaches and returns the activity lock held by the poll's tally,
  // so the caller can take ownership of it; null if no poll/tally/lock.
  public ActivityRegulator.Lock acquirePollLock(String key) {
    ActivityRegulator.Lock lock = null;
    PollManagerEntry pme = entryManager.getCurrentOrRecentV1PollEntry(key);
    if(pme != null) {
      PollTally tally = pme.poll.getVoteTally();
      if(tally != null) {
	lock = tally.getActivityLock();
	tally.setActivityLock(null);
      }
    }
    return lock;
  }

  /**
   * suspend a poll while we wait for a repair
   * @param key the identifier key of the poll to suspend
   */
  // XXX: V3 -- Only required for V1 polls.
  public void suspendPoll(String key) {
    PollManagerEntry pme = entryManager.suspendPoll(key);
    if (pme == null) {
      theLog.debug2("ignoring suspend request for unknown key " + key);
    } else {
      theLog.debug("suspended poll " + key);
    }
  }

  /**
   * resume a poll that had been suspended for a repair and check the repair
   * @param replayNeeded true we now need to replay the poll results
   * @param key the key of the suspended poll
   * @param lock the activity lock to reattach to the poll's tally
   */
  // XXX: V3 -- Only required for V1 polls.
  public void resumePoll(boolean replayNeeded,
			 String key,
			 ActivityRegulator.Lock lock) {
    PollManagerEntry pme = getPollManagerEntry(key);
    if(pme == null) {
      theLog.debug2("ignoring resume request for unknown key " + key);
      return;
    }
    theLog.debug("resuming poll " + key);
    PollTally tally = pme.getPoll().getVoteTally();
    tally.setActivityLock(lock);
    long expiration = 0;
    Deadline d;
    NodeManager nm = getDaemon().getNodeManager(tally.getArchivalUnit());
    nm.startPoll(tally.getCachedUrlSet(), tally, true);
    if (replayNeeded) {
      theLog.debug2("starting replay of poll " + key);
      PollFactory pollFact = getPollFactory(pme.poll.getVersion());
      // should be equivalent to this. is it?
      //     PollFactory pollFact = getPollFactory(pme.spec);
      if (pollFact != null) {
	expiration = pollFact.getMaxPollDuration(Poll.V1_CONTENT_POLL);
      } else {
	expiration = 0; // XXX
      }
      d = Deadline.in(expiration);
      tally.startReplay(d);
    } else {
      pme.poll.stopPoll();
    }
    // NOTE(review): (String) cast below is redundant; key is already a String.
    theLog.debug3("completed resume poll " + (String) key);
  }

  /**
   * handle an incoming message packet.  This will create a poll if
   * one is not already running. It will then call recieveMessage on
   * the poll.  This was moved from node state which kept track of the polls
   * running in the node.  This will need to be moved or amended to support this.
   * @param msg the message used to generate the poll
   * @throws IOException thrown if the poll was unsuccessfully created
   */
  void handleIncomingMessage(LcapMessage msg) throws IOException {
    if (theLog.isDebug2()) theLog.debug2("Got a message: " + msg);
    PollFactory fact = getPollFactory(msg);
    if(fact.isDuplicateMessage(msg, this)) {
      theLog.debug3("Dropping duplicate message:" + msg);
      return;
    }
    String key = msg.getKey();
    PollManagerEntry pme;
    if (msg instanceof V1LcapMessage) {
      // Needed for TestPollManager.testCloseThePoll to pass.
      pme = getCurrentOrRecentV1PollEntry(key);
    } else {
      pme = getPollManagerEntry(key);
    }
    if(pme != null) {
      if(pme.isPollCompleted() || pme.isPollSuspended()) {
	theLog.debug("Message received after poll was closed." + msg);
	return;
      }
    }
    // Find (or create) the poll and deliver the message to it.
    BasePoll p = findPoll(msg);
    if (p != null) {
      p.setMessage(msg);
      p.receiveMessage(msg);
    }
  }

  /**
   * Find the poll defined by the <code>Message</code>.  If the poll
   * does not exist this will create a new poll (iff there are no conflicts)
   * @param msg <code>Message</code>
   * @return <code>Poll</code> which matches the message opcode, or a new
   * poll, or null if the new poll would conflict with a currently running poll.
   * @throws IOException if message opcode is unknown.
*/ synchronized BasePoll findPoll(LcapMessage msg) throws IOException { String key = msg.getKey(); BasePoll ret = null; PollManagerEntry pme = getPollManagerEntry(key); if (pme == null) { theLog.debug3("findPoll: Making new poll: " + key); // makePoll will add the poll to the entryMap. todo(bhayes): // can this synchronization be improved? As it is, we lock this, // then makePoll will eventually grab the pollMapLock. ret = makePoll(msg); if (theLog.isDebug3()) { if (ret != null) { theLog.debug3("findPoll: Made new poll: " + key); } else { theLog.debug3("findPoll: Did not make new poll: " + key); } } } else { theLog.debug3("findPoll: Returning existing poll: " + key); ret = pme.poll; } return ret; } /** * make a new poll of the type and version defined by the incoming message. * @param msg <code>Message</code> to use for * @return a new Poll object of the required type, or null if we don't * want to run this poll now (<i>ie</i>, due to a conflict with another * poll). * @throws ProtocolException if message opcode is unknown */ BasePoll makePoll(LcapMessage msg) throws ProtocolException { theLog.debug2("makePoll: From message: " + msg); // XXX: V3 Refactor - this could be cleaned up // Dispatch on the type of the msg. if (msg instanceof V1LcapMessage) { return makeV1Poll((V1LcapMessage)msg); } else if (msg instanceof V3LcapMessage) { return makeV3Voter((V3LcapMessage)msg); } else { throw new ProtocolException("Unexpected LCAP Message type."); } } /** * Make a V3Voter. 
   */
  private BasePoll makeV3Voter(V3LcapMessage msg) throws ProtocolException {
    PollSpec spec = new PollSpec(msg);
    long duration = msg.getDuration();
    PeerIdentity orig = msg.getOriginatorId();
    String hashAlg = msg.getHashAlgorithm();
    theLog.debug("Making V3Voter from: " + spec);
    PollFactory pollFact = getPollFactory(spec);
    BasePoll poll = pollFact.createPoll(spec, getDaemon(),
					orig, duration, hashAlg, msg);
    // The V3 factory should only ever make a V3Voter in response to a message.
    if (poll != null && !(poll instanceof V3Voter)) {
      throw new ProtocolException("msg "+msg+
				  " made unexpected kind of poll: "+poll);
    }
    processNewPoll(poll, msg);
    return poll;
  }

  /**
   * V1 for testing only.  Makes a V1 poll in response to an incoming
   * message, after checking that we have the AU and that our plugin poll
   * version matches the spec's.
   */
  private BasePoll makeV1Poll(V1LcapMessage msg) throws ProtocolException {
    PollSpec spec = new PollSpec(msg);
    long duration = msg.getDuration();
    PeerIdentity orig = msg.getOriginatorId();
    String hashAlg = msg.getHashAlgorithm();
    CachedUrlSet cus = spec.getCachedUrlSet();
    // check for presence of item in the cache
    if (cus == null) {
      theLog.debug2("Ignoring poll request, don't have AU: " +
		    spec.getAuId());
      return null;
    }
    ArchivalUnit au = cus.getArchivalUnit();
    if (!spec.getPluginVersion().equals(AuUtil.getPollVersion(au))) {
      theLog.debug("Ignoring poll request for " + au.getName() +
                   " from peer " + msg.getOriginatorId() +
                   ". plugin version mismatch; have: " +
                   AuUtil.getPollVersion(au) +
                   ", need: " + spec.getPluginVersion());
      return null;
    }
    theLog.debug("Making poll from: " + spec);
    PollFactory pollFact = getPollFactory(spec);
    BasePoll poll = pollFact.createPoll(spec, getDaemon(),
					orig, duration, hashAlg, msg);
    processNewPoll(poll, msg);
    return poll;
  }

  // Creates and starts a poller (no incoming message) for the given spec.
  private BasePoll makePoller(PollSpec spec,
			      long duration,
			      PeerIdentity orig) throws ProtocolException {
    theLog.debug("Making poll from: " + spec);
    // If this is a V3 PollSpec, passing null to V3PollFactory will
    // create a V3Poller
    PollFactory pollFact = getPollFactory(spec);
    String hashAlg = LcapMessage.getDefaultHashAlgorithm();
    BasePoll poll = pollFact.createPoll(spec, getDaemon(),
					orig, duration, hashAlg, null);
    processNewPoll(poll, null);
    return poll;
  }

  /**
   * If poll is not null, do what needs to be done to new polls:
   * attach the triggering message (may be null), register the poll
   * with the entry manager, and start it.
   */
  private void processNewPoll(BasePoll poll, LcapMessage msg) {
    if (poll != null) {
      poll.setMessage(msg);
      entryManager.addPoll(poll);
      poll.startPoll();
    }
  }

  /**
   * close the poll from any further voting
   * @param key the poll signature
   */
  public void closeThePoll(String key)  {
    PollManagerEntry pme = entryManager.removePoll(key);
    if (pme == null || pme.poll == null) {
      theLog.warning("Attempt to close unknown poll : " + key);
      return;
    }
    // mark the poll completed because if we need to call a repair poll
    // we don't want this one to be in conflict with it.
    // PollTally tally = pme.poll.getVoteTally();
    BasePoll p = pme.getPoll();
    pme.setPollCompleted();
    // todo(bhayes): No synchronized on the removePoll/addRecentPoll;
    // display could see a list without the poll.
    entryManager.addRecentPoll(key, pme);
    try {
      theIDManager.storeIdentities();
    } catch (ProtocolException ex) {
      theLog.error("Unable to write Identity DB file.");
    }

    NodeManager nm = getDaemon().getNodeManager(p.getAu());

    // XXX: This is hacked up, admittedly.  The entire NodeManager
    // and repository are getting overhauled anyway, so it makes
    // no sense to do the "right" thing here by integrating this
    // into the NodeManager somehow.
    if (p.getType() == Poll.V3_POLL) {
      // Retrieve the node state for the top-level AU
      NodeStateImpl ns = (NodeStateImpl)nm.getNodeState(p.getCachedUrlSet());
      if (ns != null) ns.closeV3Poll(p.getKey());
    }

    // XXX: V3 -- Only required for V1 polls.
    //
    // Don't tell the node manager about verify polls
    // If closing a name poll that started ranged subpolls, don't tell
    // the node manager about it until all ranged subpolls have finished
    if ((p.getType() == Poll.V1_NAME_POLL ||
	 p.getType() == Poll.V1_CONTENT_POLL) &&
	!p.isSubpollRunning()) {
      V1PollTally tally = (V1PollTally)p.getVoteTally();
      // if closing last name poll, concatenate all the name lists into the
      // first tally and pass that to node manager
      if (p.getType() == Poll.V1_NAME_POLL) {
	V1PollTally lastTally = (V1PollTally)tally;
	tally = lastTally.concatenateNameSubPollLists();
      }
      theLog.debug("handing poll results to node manager: " + tally);
      nm.updatePollResults(p.getCachedUrlSet(), tally);
      // free the activity lock
      ActivityRegulator.Lock lock = tally.getActivityLock();
      if(lock != null) {
	lock.expire();
      }
    }
  }

  // Only used in V1PollFactory.
  /**
   * getActivePollSpecIterator returns an Iterator over the set of
   * PollSpec instances which currently have active polls on the given au.
   * @return Iterator over set of PollSpec
   */
  protected Iterator<PollSpec> getActivePollSpecIterator(ArchivalUnit au,
							 BasePoll dontIncludePoll) {
    return entryManager.getActivePollSpecIterator(au, dontIncludePoll);
  }

  /** Raise an alert via the alert manager. */
  public void raiseAlert(Alert alert) {
    theAlertManager.raiseAlert(alert);
  }

  /** Raise an alert with an explanatory message via the alert manager. */
  public void raiseAlert(Alert alert, String msg) {
    theAlertManager.raiseAlert(alert, msg);
  }

  /**
   * Ask that the specified poll runner task be executed.
*/ public void runTask(PollRunner.Task task) { theTaskRunner.runTask(task); } /** * send a message to the multicast address for this archival unit * @param msg the LcapMessage to send * @param au the ArchivalUnit for this message * @throws IOException */ void sendMessage(V1LcapMessage msg, ArchivalUnit au) throws IOException { if(theRouter != null) { theRouter.send(msg, au); } } /** * send a message to the unicast address given by an identity * @param msg the LcapMessage to send * @param au the ArchivalUnit for this message * @param id the PeerIdentity of the identity to send to * @throws IOException */ void sendMessageTo(V1LcapMessage msg, ArchivalUnit au, PeerIdentity id) throws IOException { theRouter.sendTo(msg, au, id); } /** * send a message to the unicast address given by an identity * @param msg the LcapMessage to send * @param id the PeerIdentity of the identity to send to * @throws IOException */ public void sendMessageTo(V3LcapMessage msg, PeerIdentity id) throws IOException { theRouter.sendTo(msg, id); } /** * @return the state directory for the given V3 poll. 
   */
  // CR: add getStateDir() to BasePoll to avoid downcast
  public File getStateDir(String pollKey) {
    if (pollKey == null) return null;
    Poll p = this.getPoll(pollKey);
    if (p != null) {
      if (p instanceof V3Voter) {
	return ((V3Voter)p).getStateDir();
      } else if (p instanceof V3Poller) {
	return ((V3Poller)p).getStateDir();
      }
    }
    return null;
  }

  /** @return the IdentityManager this PollManager was started with. */
  IdentityManager getIdentityManager() {
    return theIDManager;
  }

  /** @return the HashService this PollManager was started with. */
  HashService getHashService() {
    return theHashService;
  }

  /**
   * Reload all configuration parameters relevant to polling.  Called by
   * the configuration framework whenever config changes; also forwards
   * the change to each PollFactory.
   * @param newConfig the new configuration
   * @param oldConfig the previous configuration
   * @param changedKeys the set of keys that changed
   */
  public void setConfig(Configuration newConfig,
			Configuration oldConfig,
			Configuration.Differences changedKeys) {
    if (changedKeys.contains(PREFIX)) {
      m_recentPollExpireTime =
	newConfig.getTimeInterval(PARAM_RECENT_EXPIRATION,
				  DEFAULT_RECENT_EXPIRATION);
      entryManager.setExpireInterval(m_recentPollExpireTime);
      enablePollers =
	newConfig.getBoolean(PARAM_ENABLE_V3_POLLER, DEFAULT_ENABLE_V3_POLLER);
      enableVoters =
	newConfig.getBoolean(PARAM_ENABLE_V3_VOTER, DEFAULT_ENABLE_V3_VOTER);
      autoPollAuClassess = newConfig.getList(PARAM_AUTO_POLL_AUS,
					     DEFAULT_AUTO_POLL_AUS);
      // Normalize the AU class names to lower case for later comparison.
      for (ListIterator<String> iter = autoPollAuClassess.listIterator();
	   iter.hasNext(); ) {
	iter.set(iter.next().toLowerCase());
      }
      deleteInvalidPollStateDirs =
	newConfig.getBoolean(PARAM_DELETE_INVALID_POLL_STATE_DIRS,
			     DEFAULT_DELETE_INVALID_POLL_STATE_DIRS);
      paramDiscardSavedPolls =
	newConfig.getBoolean(PARAM_DISCARD_SAVED_POLLS,
			     DEFAULT_DISCARD_SAVED_POLLS);

      paramToplevelPollInterval =
	newConfig.getTimeInterval(PARAM_TOPLEVEL_POLL_INTERVAL,
				  DEFAULT_TOPLEVEL_POLL_INTERVAL);
      pollStartInitialDelay =
	newConfig.getTimeInterval(PARAM_START_POLLS_INITIAL_DELAY,
				  DEFAULT_START_POLLS_INITIAL_DELAY);
      paramQueueEmptySleep = newConfig.getTimeInterval(PARAM_QUEUE_EMPTY_SLEEP,
						       DEFAULT_QUEUE_EMPTY_SLEEP);
      paramMaxPollersSleep =
	newConfig.getTimeInterval(PARAM_MAX_POLLERS_SLEEP,
				  DEFAULT_MAX_POLLERS_SLEEP);
      paramPollQueueMax = newConfig.getInt(PARAM_POLL_QUEUE_MAX,
					   DEFAULT_POLL_QUEUE_MAX);

      paramRebuildPollQueueInterval =
	newConfig.getTimeInterval(PARAM_REBUILD_POLL_QUEUE_INTERVAL,
				  DEFAULT_REBUILD_POLL_QUEUE_INTERVAL);
      paramMinPollAttemptInterval =
	newConfig.getTimeInterval(PARAM_MIN_POLL_ATTEMPT_INTERVAL,
				  DEFAULT_MIN_POLL_ATTEMPT_INTERVAL);
      boolean oldEnable = enableV3Poller;
      enableV3Poller = newConfig.getBoolean(PARAM_ENABLE_V3_POLLER,
					    DEFAULT_ENABLE_V3_POLLER);
      maxSimultaneousPollers =
	newConfig.getInt(PARAM_MAX_SIMULTANEOUS_V3_POLLERS,
			 DEFAULT_MAX_SIMULTANEOUS_V3_POLLERS);
      enablePollStarterThrottle =
	newConfig.getBoolean(PARAM_ENABLE_POLL_STARTER_THROTTLE,
			     DEFAULT_ENABLE_POLL_STARTER_THROTTLE);
      isAsynch = newConfig.getBoolean(PARAM_PSM_ASYNCH,
				      DEFAULT_PSM_ASYNCH);
      wrongGroupRetryTime =
	newConfig.getTimeInterval(PARAM_WRONG_GROUP_RETRY_TIME,
				  DEFAULT_WRONG_GROUP_RETRY_TIME);
      paramMinPercentForRepair =
	newConfig.getPercentage(V3Voter.PARAM_MIN_PERCENT_AGREEMENT_FOR_REPAIRS,
				V3Voter.DEFAULT_MIN_PERCENT_AGREEMENT_FOR_REPAIRS);
      paramWillingRepairerLiveness =
	newConfig.getTimeInterval(PARAM_WILLING_REPAIRER_LIVENESS,
				  DEFAULT_WILLING_REPAIRER_LIVENESS);
      paramAcceptRepairersPollPercent =
	newConfig.getPercentage(PARAM_ACCEPT_REPAIRERS_POLL_PERCENT,
				DEFAULT_ACCEPT_REPAIRERS_POLL_PERCENT);
      paramInvitationWeightAtRisk =
	newConfig.getDouble(PARAM_INVITATION_WEIGHT_AT_RISK,
			    DEFAULT_INVITATION_WEIGHT_AT_RISK);
      paramInvitationWeightAlreadyRepairable =
	newConfig.getDouble(PARAM_INVITATION_WEIGHT_ALREADY_REPAIRABLE,
			    DEFAULT_INVITATION_WEIGHT_ALREADY_REPAIRABLE);

      // Peers on these subnets are never invited into polls.
      List<String> noInvitationIps =
	newConfig.getList(V3Poller.PARAM_NO_INVITATION_SUBNETS, null);
      if (noInvitationIps == null || noInvitationIps.isEmpty()) {
	noInvitationSubnetFilter = null;
      } else {
	try {
	  IpFilter filter = new IpFilter();
	  filter.setFilters(noInvitationIps, Collections.EMPTY_LIST);
	  noInvitationSubnetFilter = filter;
	} catch (IpFilter.MalformedException e) {
	  theLog.warning("Malformed noInvitationIps, not installed: " +
			 noInvitationIps, e);
	}
      }
      if (changedKeys.contains(PARAM_AT_RISK_AU_INSTANCES) &&
	  theIDManager != null) {
	atRiskAuInstances =
	  makeAuPeersMap(newConfig.getList(PARAM_AT_RISK_AU_INSTANCES),
			 theIDManager);
      }

      if (changedKeys.contains(PARAM_REPUTATION_TRANSFER_MAP) &&
	  theIDManager != null) {
	reputationTransferMap =
	  makeReputationPeerMap(newConfig.getList(PARAM_REPUTATION_TRANSFER_MAP),
				theIDManager);
      }

      // Rebuild each tuning curve only when its parameter changed.
      if (changedKeys.contains(PARAM_INVITATION_WEIGHT_AGE_CURVE)) {
	v3InvitationWeightAgeCurve =
	  processWeightCurve("V3 invitation weight age curve",
			     newConfig,
			     PARAM_INVITATION_WEIGHT_AGE_CURVE,
			     DEFAULT_INVITATION_WEIGHT_AGE_CURVE);
      }
//       if (changedKeys.contains(PARAM_INVITATION_WEIGHT_SAFETY_CURVE)) {
// 	v3InvitationWeightSafetyCurve =
// 	  processWeightCurve("V3 invitation weight safety curve",
// 			     newConfig,
// 			     PARAM_INVITATION_WEIGHT_SAFETY_CURVE,
// 			     DEFAULT_INVITATION_WEIGHT_SAFETY_CURVE);
//       }
      if (changedKeys.contains(PARAM_POLL_INTERVAL_AGREEMENT_CURVE)) {
	pollIntervalAgreementCurve =
	  processWeightCurve("V3 poll interval agreement curve",
			     newConfig,
			     PARAM_POLL_INTERVAL_AGREEMENT_CURVE,
			     DEFAULT_POLL_INTERVAL_AGREEMENT_CURVE);
      }
      if (changedKeys.contains(PARAM_POLL_INTERVAL_AT_RISK_PEERS_CURVE)) {
	pollIntervalAtRiskPeersCurve =
	  processWeightCurve("V3 poll interval at risk peers curve",
			     newConfig,
			     PARAM_POLL_INTERVAL_AT_RISK_PEERS_CURVE,
			     DEFAULT_POLL_INTERVAL_AT_RISK_PEERS_CURVE);
      }
      if (changedKeys.contains(PARAM_POLL_INTERVAL_AGREEMENT_LAST_RESULT)) {
	List<String> lst =
	  newConfig.getList(PARAM_POLL_INTERVAL_AGREEMENT_LAST_RESULT,
			    DEFAULT_POLL_INTERVAL_AGREEMENT_LAST_RESULT);
	Set res = new HashSet();
	for (String str : lst) {
	  res.add(Integer.valueOf(str));
	}
	pollIntervalAgreementLastResult = res;
      }
      if (changedKeys.contains(PARAM_POLL_WEIGHT_AT_RISK_PEERS_CURVE)) {
	pollWeightAtRiskPeersCurve =
	  processWeightCurve("V3 poll weight at risk peers curve",
			     newConfig,
			     PARAM_POLL_WEIGHT_AT_RISK_PEERS_CURVE,
			     DEFAULT_POLL_WEIGHT_AT_RISK_PEERS_CURVE);
      }
      if (changedKeys.contains(PARAM_ACCEPT_PROBABILITY_SAFETY_CURVE)) {
	v3AcceptProbabilitySafetyCurve =
	  processWeightCurve("V3 accept probability safety curve",
			     newConfig,
			     PARAM_ACCEPT_PROBABILITY_SAFETY_CURVE,
			     DEFAULT_ACCEPT_PROBABILITY_SAFETY_CURVE);
      }
      if (changedKeys.contains(PARAM_NOMINATION_WEIGHT_AGE_CURVE)) {
	v3NominationWeightAgeCurve =
	  processWeightCurve("V3 nomination weight age curve",
			     newConfig,
			     PARAM_NOMINATION_WEIGHT_AGE_CURVE,
			     DEFAULT_NOMINATION_WEIGHT_AGE_CURVE);
      }
      if (changedKeys.contains(PARAM_NO_AU_RESET_INTERVAL_CURVE)) {
	v3NoAuResetIntervalCurve =
	  processWeightCurve("V3 no-AU reset interval curve",
			     newConfig,
			     PARAM_NO_AU_RESET_INTERVAL_CURVE,
			     DEFAULT_NO_AU_RESET_INTERVAL_CURVE);
      }
      if (changedKeys.contains(PARAM_VOTE_RETRY_INTERVAL_DURATION_CURVE)) {
	v3VoteRetryIntervalDurationCurve =
	  processWeightCurve("V3 vote message retry interval age curve",
			     newConfig,
			     PARAM_VOTE_RETRY_INTERVAL_DURATION_CURVE,
			     DEFAULT_VOTE_RETRY_INTERVAL_DURATION_CURVE);
      }
      needRebuildPollQueue();
    }
    // Inter-poll start delay depends on comm timeouts plus a configured pad.
    long scommTimeout =
      newConfig.getTimeInterval(BlockingStreamComm.PARAM_CONNECT_TIMEOUT,
				BlockingStreamComm.DEFAULT_CONNECT_TIMEOUT);
    long psmRunnerTimeout =
      newConfig.getTimeInterval(PsmManager.PARAM_RUNNER_IDLE_TIME,
				PsmManager.DEFAULT_RUNNER_IDLE_TIME);
    long addedTimeout =
      newConfig.getTimeInterval(PARAM_ADDED_POLL_DELAY,
				DEFAULT_ADDED_POLL_DELAY);
    interPollStartDelay = (Math.max(scommTimeout, psmRunnerTimeout)
			   + addedTimeout);

    // Forward config changes to each installed poll factory.
    for (int i = 0; i < pf.length; i++) {
      if (pf[i] != null) {
	pf[i].setConfig(newConfig, oldConfig, changedKeys);
      }
    }
  }

  /** Build reputation map backwards (toPid -> fromPid) because it's
   * accessed to determine whether a repair should be served (as opposed to
   * when reputation is established), so we need to look up toPid to see if
   * another peer's reputation should be extended to it.
This implies that * only one peer's reputation may be extended to any other peer */ Peer2PeerMap makeReputationPeerMap(Collection<String> peerPairs, IdentityManager idMgr) { Peer2PeerMap res = new Peer2PeerMap(); for (String onePair : peerPairs) { List<String> lst = StringUtil.breakAt(onePair, ',', -1, true, true); if (lst.size() == 2) { try { PeerIdentity pid1 = idMgr.stringToPeerIdentity(lst.get(0)); PeerIdentity pid2 = idMgr.stringToPeerIdentity(lst.get(1)); res.put(pid2, pid1); if (theLog.isDebug2()) { theLog.debug2("Extend reputation from " + pid1 + " to " + pid2); } } catch (IdentityManager.MalformedIdentityKeyException e) { theLog.warning("Bad peer id in peer2peer map", e); } } else { theLog.warning("Malformed reputation mapping: " + onePair); } } return res; } AuPeersMap makeAuPeersMap(Collection<String> auPeersList, IdentityManager idMgr) { AuPeersMap res = new AuPeersMap(); Map<Integer,MutableInt> hist = new TreeMap<Integer,MutableInt>(); for (String oneAu : auPeersList) { List<String> lst = StringUtil.breakAt(oneAu, ',', -1, true, true); if (lst.size() >= 2) { String auid = null; Set peers = new HashSet(); for (String s : lst) { if (auid == null) { auid = s; } else { try { PeerIdentity pid = idMgr.stringToPeerIdentity(s); peers.add(pid); } catch (IdentityManager.MalformedIdentityKeyException e) { theLog.warning("Bad peer on at risk list for " + auid, e); } } } res.put(auid, peers); int size = peers.size(); MutableInt n = hist.get(size); if (n == null) { n = new MutableInt(); hist.put(size, n); } n.add(1); } } StringBuilder sb = new StringBuilder(); sb.append("AU peers hist:\nAUs at risk on\n\tPeers"); for (Map.Entry<Integer,MutableInt> ent : hist.entrySet()) { sb.append("\n"); sb.append(ent.getKey()); sb.append("\t"); sb.append(ent.getValue()); } theLog.debug(sb.toString()); return res; } public PeerIdentity getReputationTransferredFrom(PeerIdentity pid) { if (reputationTransferMap != null) { return reputationTransferMap.get(pid); } return null; } public 
Set<PeerIdentity> getPeersWithAuAtRisk(ArchivalUnit au) {
  // Null until PARAM_AT_RISK_AU_INSTANCES has been processed.
  if (atRiskAuInstances == null) {
    return null;
  }
  return atRiskAuInstances.get(au.getAuId());
}

/** Parse a weight-curve config value into a CompoundLinearSlope.
 * Returns null (after logging a warning) if the value is empty or
 * malformed. */
CompoundLinearSlope processWeightCurve(String name,
                                       Configuration config,
                                       String param,
                                       String dfault) {
  String probCurve = config.get(param, dfault);
  if (StringUtil.isNullString(probCurve)) {
    return null;
  } else {
    try {
      CompoundLinearSlope curve = new CompoundLinearSlope(probCurve);
      theLog.info("Installed " + name + ": " + curve);
      return curve;
    } catch (Exception e) {
      theLog.warning("Malformed " + name + ": " + probCurve, e);
      return null;
    }
  }
}

// Simple accessors for configured values.

public boolean isAsynch() {
  return isAsynch;
}

public long getWrongGroupRetryTime() {
  return wrongGroupRetryTime;
}

public IpFilter getNoInvitationSubnetFilter() {
  return noInvitationSubnetFilter;
}

public CompoundLinearSlope getInvitationWeightAgeCurve() {
  return v3InvitationWeightAgeCurve;
}

// public CompoundLinearSlope getInvitationWeightSafetyCurve() {
//   return v3InvitationWeightSafetyCurve;
// }

public CompoundLinearSlope getAcceptProbabilitySafetyCurve() {
  return v3AcceptProbabilitySafetyCurve;
}

public CompoundLinearSlope getNominationWeightAgeCurve() {
  return v3NominationWeightAgeCurve;
}

public double getInvitationWeightAtRisk() {
  return paramInvitationWeightAtRisk;
}

public double getInvitationWeightAlreadyRepairable() {
  return paramInvitationWeightAlreadyRepairable;
}

public CompoundLinearSlope getPollIntervalAgreementCurve() {
  return pollIntervalAgreementCurve;
}

public Set getPollIntervalAgreementLastResult() {
  return pollIntervalAgreementLastResult;
}

public CompoundLinearSlope getVoteRetryIntervalDurationCurve() {
  return v3VoteRetryIntervalDurationCurve;
}

public long getWillingRepairerLiveness() {
  return paramWillingRepairerLiveness;
}

public double getAcceptRepairersPollPercent() {
  return paramAcceptRepairersPollPercent;
}

public double getMinPercentForRepair() {
  return paramMinPercentForRepair;
}

/** True iff pid's address matches the configured no-invitation subnet
 * filter (always false when no filter is configured). */
public boolean isNoInvitationSubnet(PeerIdentity pid) {
  IpFilter filter = getNoInvitationSubnetFilter();
  return filter != null && pid.getPeerAddress().isAllowed(filter);
}

// Ensure only a single instance of a noAuSet exists for each AU, so can
// synchronize on them and use in multiple threads.
// NOTE(review): ReferenceMap(HARD, WEAK) holds values weakly — entries
// may be collected once no caller retains the set.
Map<ArchivalUnit,DatedPeerIdSet> noAuPeerSets =
  new ReferenceMap(ReferenceMap.HARD, ReferenceMap.WEAK);

/** Return the noAuSet for the AU. If an instance of the noAuSet for
 * this AU already exists in memory it will be returned. The caller must
 * synchronize on that object before operating on it */
public DatedPeerIdSet getNoAuPeerSet(ArchivalUnit au) {
  synchronized (noAuPeerSets) {
    DatedPeerIdSet noAuSet = noAuPeerSets.get(au);
    if (noAuSet == null) {
      HistoryRepository historyRepo = getDaemon().getHistoryRepository(au);
      noAuSet = historyRepo.getNoAuPeerSet();
      noAuPeerSets.put(au, noAuSet);
    }
    return noAuSet;
  }
}

/** Clear the noAuSet if it's older than the interval specified as a
 * function of the AU's age by v3NoAuResetIntervalCurve */
public void ageNoAuSet(ArchivalUnit au, DatedPeerIdSet noAuSet) {
  try {
    if (noAuSet.isEmpty()) {
      return;
    }
    long lastTimestamp = noAuSet.getDate();
    if (lastTimestamp < 0) {
      return;
    }
    AuState state = AuUtil.getAuState(au);
    long auAge = TimeBase.msSince(state.getAuCreationTime());
    long threshold = (long)Math.round(v3NoAuResetIntervalCurve.getY(auAge));
    if (TimeBase.msSince(lastTimestamp) >= threshold) {
      noAuSet.clear();
      noAuSet.store(false);
    }
  } catch (IOException e) {
    // impossible with loaded PersistentPeerIdSet
    theLog.warning("Impossible error in loaded PersistentPeerIdSet", e);
  }
}

public PollFactory getPollFactory(PollSpec spec) {
  return getPollFactory(spec.getProtocolVersion());
}

public PollFactory getPollFactory(LcapMessage msg) {
  return getPollFactory(msg.getProtocolVersion());
}

/** Return the poll factory for the protocol version, or null (with an
 * error logged) if the version is unknown. */
public PollFactory getPollFactory(int version) {
  try {
    return pf[version];
  } catch (ArrayIndexOutOfBoundsException e) {
    theLog.error("Unknown poll version: " + version, e);
    return null;
  }
}

/**
 * Load and start V3
polls that are found in a serialized state * on the disk. If the poll has expired, or if the state has been * corrupted, delete the poll directory. */ private void preloadStoredPolls() { this.serializedPollers = new HashMap(); this.serializedVoters = new HashMap(); File stateDir = PollUtil.ensurePollStateRoot(); File[] dirs = stateDir.listFiles(); if (dirs == null || dirs.length == 0) { theLog.debug2("No saved polls found."); return; } for (int ix = 0; ix < dirs.length; ix++) { boolean restored = false; // 1. See if there's a serialized poller. if (enablePollers) { File poller = new File(dirs[ix], V3PollerSerializer.POLLER_STATE_BEAN); if (poller.exists()) { if (paramDiscardSavedPolls) { theLog.debug("Discarding poll in directory " + dirs[ix]); FileUtil.delTree(dirs[ix]); continue; } // Add this poll dir to the serialized polls map. try { V3PollerSerializer pollSerializer = new V3PollerSerializer(getDaemon(), dirs[ix]); PollerStateBean psb = pollSerializer.loadPollerState(); // Check to see if this poll has expired. boolean expired = psb.getPollDeadline() <= TimeBase.nowMs(); if (expired) { theLog.debug("Discarding expires poll in directory " + dirs[ix]); FileUtil.delTree(dirs[ix]); continue; } theLog.debug2("Found saved poll for AU " + psb.getAuId() + " in directory " + dirs[ix]); // CR: Should never be more than one saved poll per AU. Don't // need Set, and error if find more than one Set pollsForAu = null; if ((pollsForAu = (Set)serializedPollers.get(psb.getAuId())) == null) { pollsForAu = new HashSet(); serializedPollers.put(psb.getAuId(), pollsForAu); } pollsForAu.add(dirs[ix]); restored = true; } catch (PollSerializerException e) { theLog.error("Exception while trying to restore poller from " + "directory: " + dirs[ix] + ". Cleaning up dir.", e); FileUtil.delTree(dirs[ix]); continue; } } else { theLog.debug("No serialized poller found in dir " + dirs[ix]); } } // 2. See if there's a serialized voter. 
if (enableVoters) { File voter = new File(dirs[ix], V3VoterSerializer.VOTER_USER_DATA_FILE); if (voter.exists()) { if (paramDiscardSavedPolls) { theLog.debug("Discarding vote in directory " + dirs[ix]); FileUtil.delTree(dirs[ix]); continue; } theLog.info("Found serialized voter in file: " + voter); try { V3VoterSerializer voterSerializer = new V3VoterSerializer(getDaemon(), dirs[ix]); VoterUserData vd = voterSerializer.loadVoterUserData(); // Check to see if this poll has expired. boolean expired = vd.getDeadline() <= TimeBase.nowMs(); if (expired) { theLog.debug("Discarding expired vote in directory " + dirs[ix]); FileUtil.delTree(dirs[ix]); continue; } theLog.debug2("Found saved poll for AU " + vd.getAuId() + " in directory " + dirs[ix]); Set pollsForAu = null; if ((pollsForAu = (Set)serializedVoters.get(vd.getAuId())) == null) { pollsForAu = new HashSet(); serializedVoters.put(vd.getAuId(), pollsForAu); } pollsForAu.add(dirs[ix]); restored = true; } catch (PollSerializerException e) { theLog.error("Exception while trying to restore voter from " + "directory: " + dirs[ix] + ". Cleaning up dir.", e); FileUtil.delTree(dirs[ix]); continue; } } else { theLog.debug("No serialized voter found in dir " + dirs[ix]); } } // If neither a voter nor a poller was found, this dir can be // cleaned up, unless KEEP_INVALID_POLLSTATE_DIRS is true. if (!restored) { if (deleteInvalidPollStateDirs) { theLog.debug("Deleting invalid poll state directory " + dirs[ix]); FileUtil.delTree(dirs[ix]); } else { theLog.debug("Not deleting invalid poll state directory " + dirs[ix] + " due to config."); } } } } public void restoreAuPolls(ArchivalUnit au) { // Shouldn't happen. if (serializedPollers == null) { throw new NullPointerException("Null serialized poll map."); } if (serializedVoters == null) { throw new NullPointerException("Null serialized voter map."); } // Restore any pollers for this AU. // CR: Don't need loop here, s.b. 
max 1 poller per AU Set pollDirs = (Set)serializedPollers.get(au.getAuId()); if (pollDirs != null) { Iterator pollDirIter = pollDirs.iterator(); while (pollDirIter.hasNext()) { File dir = (File)pollDirIter.next(); try { V3Poller p = new V3Poller(getDaemon(), dir); addPoll(p); p.startPoll(); } catch (PollSerializerException e) { theLog.error("Unable to restore poller from dir: " + dir, e); } } serializedPollers.remove(au.getAuId()); } // Restore any voters for this AU. Set voterDirs = (Set)serializedVoters.get(au.getAuId()); if (voterDirs != null) { Iterator voterDirIter = voterDirs.iterator(); while (voterDirIter.hasNext()) { File dir = (File)voterDirIter.next(); try { V3Voter v = new V3Voter(getDaemon(), dir); addPoll(v); v.startPoll(); } catch (PollSerializerException e) { theLog.error("Unable to restore poller from dir: " + dir, e); } } serializedVoters.remove(au.getAuId()); } } public PollRunner getPollRunner() { return theTaskRunner; } //--------------- PollerStatus Accessors ----------------------------- public Collection<PollManagerEntry> getV1Polls() { return entryManager.getV1Polls(); } private Collection<V3Poller> getActiveV3Pollers() { return entryManager.getActiveV3Pollers(); } private Collection<V3Poller> getRecentV3Pollers() { return entryManager.getRecentV3Pollers(); } private Collection<V3Voter> getActiveV3Voters() { return entryManager.getActiveV3Voters(); } private Collection<V3Voter> getRecentV3Voters() { return entryManager.getRecentV3Voters(); } /** * Check the current policy to see if a request for a new V3Voter * should be rejected due to too many V3Voters already present. * @return true iff the number of active V3Voters is already at or * above the limit. */ public boolean tooManyV3Voters() { int maxVoters = CurrentConfig.getIntParam(V3Voter.PARAM_MAX_SIMULTANEOUS_V3_VOTERS, V3Voter.DEFAULT_MAX_SIMULTANEOUS_V3_VOTERS); // todo(bhayes): Generating the list just to get the size. 
int activeVoters = getActiveV3Voters().size(); if (activeVoters >= maxVoters) { theLog.info("Maximum number of active voters is " + maxVoters + "; " + activeVoters + " are already running."); return true; } return false; } // Used by V3PollStatus. // todo(bhayes): Should be on EntryManager, and synchronizing there. public synchronized Collection getV3Pollers() { Collection polls = new ArrayList(); polls.addAll(getActiveV3Pollers()); polls.addAll(getRecentV3Pollers()); return polls; } // Used by V3PollStatus // todo(bhayes): Should be on EntryManager, and synchronizing there. public synchronized Collection getV3Voters() { Collection polls = new ArrayList(); polls.addAll(getActiveV3Voters()); polls.addAll(getRecentV3Voters()); return polls; } // Used by V3PollStatus public BasePoll getPoll(String key) { PollManagerEntry pme = getCurrentOrRecentV3PollEntry(key); if(pme != null) { return pme.getPoll(); } return null; } /** * remove the poll represented by the given key from the poll table and * return it. * @param key the String representation of the polls key * @return Poll the poll if found or null */ BasePoll removePoll(String key) { PollManagerEntry pme = entryManager.removePoll(key); return (pme != null) ? pme.poll : null; } void addPoll(BasePoll p) { entryManager.addPoll(p); } // Used only in TestPollManager boolean isPollActive(String key) { PollManagerEntry pme = getPollManagerEntry(key); return (pme != null) ? pme.isPollActive() : false; } // Used only in TestPollManager boolean isPollClosed(String key) { return entryManager.isPollClosed(key); } // Used only in TestPollManager boolean isPollSuspended(String key) { PollManagerEntry pme = getPollManagerEntry(key); return (pme != null) ? 
pme.isPollSuspended() : false; } // Used only in TestPollManager static BasePoll makeTestPoll(LcapMessage msg) throws ProtocolException { if(theManager == null) { theManager = new PollManager(); } return theManager.makePoll(msg); } long getSlowestHashSpeed() { return theSystemMetrics.getSlowestHashSpeed(); } long getBytesPerMsHashEstimate() throws SystemMetrics.NoHashEstimateAvailableException { return theSystemMetrics.getBytesPerMsHashEstimate(); } // Used only in V1PollFactory. public boolean hasPoll(String key) { return entryManager.hasPoll(key); } public V3PollStatusAccessor getV3Status() { return v3Status; } // ---------------- Callbacks ----------------------------------- class RouterMessageHandler implements LcapRouter.MessageHandler { public void handleMessage(LcapMessage msg) { theLog.debug3("received from router message:" + msg.toString()); try { handleIncomingMessage(msg); } catch (IOException ex) { theLog.error("handleIncomingMessage() threw", ex); } } } /** * <p>PollManagerEntry: </p> * <p>Description: Class to represent the data store in the polls table. * @version 1.0 */ public static class PollManagerEntry { private BasePoll poll; private PollSpec spec; private int type; private Deadline pollDeadline; private Deadline deadline; private String key; PollManagerEntry(BasePoll p) { poll = p; spec = p.getPollSpec(); type = p.getPollSpec().getPollType(); key = p.getKey(); pollDeadline = p.getDeadline(); deadline = null; } boolean isPollActive() { return poll.isPollActive(); } boolean isPollCompleted() { return poll.isPollCompleted(); } boolean isPollSuspended() { if (isV3Poll()) return false; return poll.getVoteTally().stateIsSuspended(); } synchronized void setPollCompleted() { // todo(bhayes): Why is this synchronized? if (!isV3Poll()) { PollTally tally = poll.getVoteTally(); tally.tallyVotes(); } } synchronized void setPollSuspended() { // todo(bhayes): Why is this synchronized? 
poll.getVoteTally().setStateSuspended();
    if(deadline != null) {
      deadline.expire();
      deadline = null;
    }
  }

  public String getStatusString() {
    // Hack for V3.
    if (isV3Poll()) {
      return poll.getStatusString();
    } else {
      if (isPollCompleted()) {
        return poll.getVoteTally().getStatusString();
      } else if(isPollActive()) {
        return "Active";
      } else if(isPollSuspended()) {
        return "Repairing";
      }
      return "Unknown";
    }
  }

  public String getTypeString() {
    return Poll.POLL_NAME[type];
  }

  // First 10 chars of the poll key, for display.
  public String getShortKey() {
    return(key.substring(0,10));
  }

  public String getKey() {
    return key;
  }

  public BasePoll getPoll() {
    return poll;
  }

  public int getType() {
    return type;
  }

  public PollSpec getPollSpec() {
    return spec;
  }

  public Deadline getPollDeadline() {
    return pollDeadline;
  }

  public Deadline getDeadline() {
    return deadline;
  }

  // Same poll == same poll type and same cached URL set.
  public boolean isSamePoll(PollSpec otherSpec) {
    if(this.type == otherSpec.getPollType()) {
      return this.spec.getCachedUrlSet().equals(otherSpec.getCachedUrlSet());
    }
    return false;
  }

  /**
   * Convenience method
   * @return True iff this is a V3 poll.
   */
  // XXX: V3 -- Remove when V1 polling is no longer supported.
  public boolean isV3Poll() {
    return (this.type == Poll.V3_POLL);
  }
}

/*
 * XXX: This is a temporary class to hold AU-specific status for
 *      V3 polls.  Eventually, the goal is to replace the current
 *      node and poll history with a centralized V3-centric poll
 *      history mechanism.  Until then, this in-memory structure will
 *      hold poll history for V3 AUs between reboots.
 */
public class V3PollStatusAccessor {
  // auId -> V3PollStatusAccessorEntry
  HashMap map;

  // Absolute ms timestamp of the next scheduled poll start; -1 = none.
  long nextPollStartTime = -1;

  public V3PollStatusAccessor() {
    map = new HashMap();
  }

  // Get-or-create the per-AU entry.
  private V3PollStatusAccessorEntry getEntry(String auId) {
    V3PollStatusAccessorEntry e = (V3PollStatusAccessorEntry)map.get(auId);
    if (e == null) {
      e = new V3PollStatusAccessorEntry();
      map.put(auId, e);
    }
    return e;
  }

  /**
   * Set the last completed V3 poll time for an AU.
   *
   * @param auId The ID of the Archival Unit.
   * @param lastPollTime The timestamp of the last completed V3 poll.
   */
  public void setLastPollTime(String auId, long lastPollTime) {
    getEntry(auId).lastPollTime = lastPollTime;
  }

  /**
   * Get the last completed V3 poll time for an AU.
   *
   * @param auId The ID of the Archival Unit.
   * @return The timestamp of the last completed V3 poll.
   */
  public long getLastPollTime(String auId) {
    return getEntry(auId).lastPollTime;
  }

  /**
   * Increment the number of completed V3 polls for an AU.
   *
   * @param auId The ID of the Archival Unit.
   */
  public void incrementNumPolls(String auId) {
    getEntry(auId).numPolls++;
  }

  /**
   * Return the number of polls (since the last restart) for
   * an Archival Unit.
   *
   * @param auId The ID of the Archival Unit.
   * @return The number of completed V3 polls since the last
   *         daemon restart.
   */
  public int getNumPolls(String auId) {
    return getEntry(auId).numPolls;
  }

  /**
   * Set the percent agreement for an archival unit as of the
   * last completed V3 poll.
   *
   * @param auId The ID of the Archival Unit.
   * @param agreement The percent agreement as of the last completed V3 poll.
   */
  public void setAgreement(String auId, float agreement) {
    getEntry(auId).agreement = agreement;
  }

  /**
   * Return the percent agreement for an archival unit as of the last
   * completed V3 poll.
   *
   * @param auId The ID of the Archival Unit.
   * @return The percent agreement as of the last completed V3 poll.
   */
  public float getAgreement(String auId) {
    return getEntry(auId).agreement;
  }

  public void setNextPollStartTime(Deadline when) {
    if (when == null) {
      nextPollStartTime = -1;
    } else {
      nextPollStartTime = when.getExpirationTime();
    }
  }

  public Deadline getNextPollStartTime() {
    if (nextPollStartTime > -1) {
      return Deadline.restoreDeadlineAt(nextPollStartTime);
    } else {
      return null;
    }
  }

  /**
   * Clear the poll history map.
   */
  public void clear() {
    map.clear();
  }
}

/*
 * Just a struct to hold status information per-au
 */
// CR: Seth thinks this is redundant
private static class V3PollStatusAccessorEntry {
  public long lastPollTime = -1;
  public int numPolls = 0;
  public float agreement = 0.0f;
}

/** LOCKSS Runnable responsible for occasionally scanning for AUs that
 * need polls. */
private class PollStarter extends LockssRunnable {

  static final String PRIORITY_PARAM_POLLER = "Poller";
  static final int PRIORITY_DEFAULT_POLLER = Thread.NORM_PRIORITY - 1;

  private LockssDaemon lockssDaemon;
  private PollManager pollManager;

  // Cleared by stopPollStarter() to end the run loop.
  private volatile boolean goOn = true;

  public PollStarter(LockssDaemon lockssDaemon,
                     PollManager pollManager) {
    super("PollStarter");
    this.lockssDaemon = lockssDaemon;
    this.pollManager = pollManager;
  }

  public void lockssRun() {
    // Trigger the LockssRun thread watchdog on exit.
    triggerWDogOnExit(true);
    setPriority(PRIORITY_PARAM_POLLER, PRIORITY_DEFAULT_POLLER);

    if (goOn) {
      try {
        theLog.debug("Waiting until AUs started");
        lockssDaemon.waitUntilAusStarted();
        Deadline initial = Deadline.in(pollStartInitialDelay);
        pollManager.getV3Status().setNextPollStartTime(initial);
        initial.sleep();
      } catch (InterruptedException e) {
        // just wakeup and check for exit
      }
    }

    while (goOn) {
      pollManager.getV3Status().setNextPollStartTime(null);
      try {
        startOnePoll();
      } catch (RuntimeException e) {
        // Can happen if AU deactivated recently
        theLog.debug2("Error starting poll", e);
        // Avoid tight loop if startOnePoll() throws.  Just being extra
        // cautious in case another bug similar to Roundup 4091 arises.
try { Deadline errorWait = Deadline.in(Constants.MINUTE); errorWait.sleep(); } catch (InterruptedException ign) { // ignore } } catch (InterruptedException e) { // check goOn } } } public void stopPollStarter() { goOn = false; interruptThread(); } } boolean startOnePoll() throws InterruptedException { if (!enableV3Poller) { startOneWait.expireIn(paramMaxPollersSleep); } else { // todo(bhayes): Generating the list just to get the size. int activePollers = getActiveV3Pollers().size(); if (activePollers >= maxSimultaneousPollers) { startOneWait.expireIn(paramMaxPollersSleep); } else { PollReq req = pollQueue.nextReq(); if (req != null) { startPoll(req); // todo(bhayes): This seems odd; I would expect that this // would be done inside nextReq(). pollQueue.cancelAuPolls(req.au); return true; } else { startOneWait.expireIn(paramQueueEmptySleep); } } } v3Status.setNextPollStartTime(startOneWait); while (!startOneWait.expired()) { try { startOneWait.sleep(); } catch (InterruptedException e) { // just wakeup and check } } return false; } public List<String> getAutoPollAuClasses() { return autoPollAuClassess; } public List<ArchivalUnit> getPendingQueueAus() { return pollQueue.getPendingQueueAus(); } public void enqueueHighPriorityPoll(ArchivalUnit au, PollSpec spec) throws NotEligibleException { if (au.getAuId() != spec.getAuId()) { throw new IllegalArgumentException("auId in au \""+au.getAuId() +"\" does not match auId in spec \"" +spec.getAuId()+"\""); } PollReq req = new PollManager.PollReq(au) .setPollSpec(spec) .setPriority(2); enqueueHighPriorityPoll(req); } private void enqueueHighPriorityPoll(PollReq req) throws NotEligibleException { theLog.debug2("enqueueHighPriorityPoll(" + req + ")"); if (!req.isHighPriority()) { throw new IllegalArgumentException( "High priority polls must have a positive priority: "+req); } // the check will throw NotEligibleException with an appropriate message. 
checkEligibleForPoll(req); pollQueue.enqueueHighPriorityPoll(req); } void needRebuildPollQueue() { // Expiration of these timers causes nextReq() to rebuild the poll // queue the next time it's called. As it doesn't trigger an immediate // event, there's no need for a short delay. pollQueue.needRebuildPollQueue(); startOneWait.expire(); } // testing will override. protected List<ArchivalUnit> weightedRandomSelection(Map<ArchivalUnit, Double> weightMap, int n) { return (List<ArchivalUnit>)CollectionUtil. weightedRandomSelection(weightMap, n); } /** Used to convey reason an AU is ineligible to be polled to clients for * logging/display */ public class NotEligibleException extends Exception { public NotEligibleException(String msg) { super(msg); } } private boolean isEligibleForPoll(PollReq req) { try { checkEligibleForPoll(req); return true; } catch (NotEligibleException e) { return false; } } // todo(bhayes): DebugPanel calls this, assuming the default is a V3 // Content Poll. On the other hand, it also creates an explicit // PollSpec for enqueueHighPriorityPoll. It feels as if in this API // the two calls should be parallel. Also possible would be for // checkEligibleForPoll to let the caller know if a high priority // would be eligible when a default priority would not. /** * @return true iff the ArchivalUnit would be eligible for a * "default" V3 Content Poll. */ public void checkEligibleForPoll(ArchivalUnit au) throws NotEligibleException { // todo(bhayes): This is creating a PollReq with no PollSpec, // purely for the prupose of checking eligibility, which happens // to not use the spec. checkEligibleForPoll(new PollReq(au)); } private void checkEligibleForPoll(PollReq req) throws NotEligibleException { ArchivalUnit au = req.au; // If a poll is already running, don't start another one. 
if (isPollRunning(au)) { throw new NotEligibleException("AU is already running a poll."); } checkAuClassAllowed(req); if (req.isHighPriority()) { return; } // Following tests suppressed for high priority (manually enqueued) // polls AuState auState = AuUtil.getAuState(req.au); // Does AU want to be polled? if (!au.shouldCallTopLevelPoll(auState)) { throw new NotEligibleException("AU does not want to be polled."); } // Do not call polls on AUs that have not crawled, UNLESS that AU // is marked pubdown. if (!auState.hasCrawled() && !AuUtil.isPubDown(au)) { theLog.debug3("Not crawled or down, not calling a poll on " + au); throw new NotEligibleException("AU has not crawled and is not marked down."); } long sinceLast = TimeBase.msSince(auState.getLastPollAttempt()); if (sinceLast < paramMinPollAttemptInterval) { String msg = "Poll attempted too recently (" + StringUtil.timeIntervalToString(sinceLast) + " < " + StringUtil.timeIntervalToString(paramMinPollAttemptInterval) + ")."; theLog.debug3(msg + " " + au); throw new NotEligibleException(msg); } } void checkAuClassAllowed(PollReq req) throws NotEligibleException { if (autoPollAuClassess.contains("all")) { return; } ArchivalUnit au = req.au; if (pluginMgr.isInternalAu(au) && autoPollAuClassess.contains("internal")) { return; } if (req.isHighPriority() && autoPollAuClassess.contains("priority")) { return; } throw new NotEligibleException("Only AU classes {" + StringUtil.separatedString(autoPollAuClassess, ", ") + "} are allowed to poll."); } /** Return a number proportional to the desirability of calling a poll on * the AU. 
*/
double pollWeight(ArchivalUnit au) throws NotEligibleException {
  checkEligibleForPoll(au);
  AuState auState = AuUtil.getAuState(au);
  long lastEnd = auState.getLastTopLevelPollTime();
  long pollInterval;
  // Use the agreement-based interval curve only when configured and the
  // last poll's result code is in the configured result set.
  if (pollIntervalAgreementCurve != null &&
      pollIntervalAgreementLastResult.contains(auState.getLastPollResult())) {
    int agreePercent = (int)Math.round(auState.getV3Agreement() * 100.0);
    pollInterval = (int)pollIntervalAgreementCurve.getY(agreePercent);
  } else {
    pollInterval = paramToplevelPollInterval;
  }
  // At-risk peers may shorten the interval (negative curve value = no cap).
  int numrisk = numPeersWithAuAtRisk(au);
  if (pollIntervalAtRiskPeersCurve != null) {
    int atRiskInterval = (int)pollIntervalAtRiskPeersCurve.getY(numrisk);
    if (atRiskInterval >= 0) {
      pollInterval = Math.min(pollInterval, atRiskInterval);
    }
  }
  // Not yet due: weight zero.
  if (lastEnd + pollInterval > TimeBase.nowMs()) {
    return 0.0;
  }
  // Weight grows with time since last poll, normalized by the interval
  // plus typical poll duration, optionally scaled by the at-risk curve.
  long num = TimeBase.msSince(lastEnd);
  long denom = pollInterval + auState.getPollDuration();
  double weight = (double)num / (double)denom;
  if (pollWeightAtRiskPeersCurve != null) {
    weight *= pollWeightAtRiskPeersCurve.getY(numrisk);
  }
  return weight;
}

/** Number of peers at risk for au, 0 if none configured. */
int numPeersWithAuAtRisk(ArchivalUnit au) {
  Set peers = getPeersWithAuAtRisk(au);
  if (peers == null) {
    return 0;
  }
  return peers.size();
}

/** Call a V3 content poll on the request's AU; optionally sleep
 * afterwards to throttle poll starting.
 * @return true iff the poll was called. */
boolean startPoll(PollReq req) {
  ArchivalUnit au = req.getAu();
  if (isPollRunning(au)) {
    theLog.debug("Attempted to start poll when one is already running: " +
                 au.getName());
    return false;
  }

  // todo(bhayes): Should this be using the spec from the request?
  PollSpec spec = new PollSpec(au.getAuCachedUrlSet(), Poll.V3_POLL);
  theLog.debug("Calling a V3 poll on AU " + au);

  if (callPoll(au, spec) == null) {
    theLog.debug("pollManager.callPoll returned null. Failed to call " +
                 "a V3 poll on " + au);
    return false;
  }

  // Add a delay to throttle poll starting.  The delay is the sum of
  // the scomm timeout and an additional number of milliseconds.
  if (enablePollStarterThrottle) {
    try {
      Deadline dontStartPollBefore = Deadline.in(interPollStartDelay);
      v3Status.setNextPollStartTime(dontStartPollBefore);
      dontStartPollBefore.sleep();
    } catch (InterruptedException ex) {
      // Just proceed to the next poll.
    }
    v3Status.setNextPollStartTime(null);
  }
  return true;
}

private Set recalcingAus = Collections.synchronizedSet(new HashSet());

/** Remember that we have scheduled a hash to recalculate the hash time
 * for this AU */
public void addRecalcAu(ArchivalUnit au) {
  recalcingAus.add(au);
}

/** Done with hash to recalculate the hash time for this AU */
public void removeRecalcAu(ArchivalUnit au) {
  recalcingAus.remove(au);
}

/** Return true if already have scheduled a hash to recalculate the hash
 * time for this AU */
public boolean isRecalcAu(ArchivalUnit au) {
  return recalcingAus.contains(au);
}

public enum EventCtr {Polls,
    Invitations,
    Accepted,
    Declined,
    Voted,
    ReceivedVoteReceipt,
    };

Map<EventCtr,MutableInt> eventCounters =
  new EnumMap<EventCtr,MutableInt>(EventCtr.class);

Map<PollNak,MutableInt> voterNakEventCounters =
  new EnumMap<PollNak,MutableInt>(PollNak.class);

// Indexed by poller status code; writes synchronize on the array.
int[] pollEndEventCounters = new int[POLLER_STATUS_STRINGS.length];

public void countEvent(EventCtr c) {
  synchronized (eventCounters) {
    MutableInt n = eventCounters.get(c);
    if (n == null) {
      n = new MutableInt();
      eventCounters.put(c, n);
    }
    n.add(1);
  }
}

/** Count a voter NAK by reason; every NAK also counts as Declined. */
public void countVoterNakEvent(PollNak nak) {
  synchronized (voterNakEventCounters) {
    MutableInt n = voterNakEventCounters.get(nak);
    if (n == null) {
      n = new MutableInt();
      voterNakEventCounters.put(nak, n);
    }
    n.add(1);
  }
  countEvent(EventCtr.Declined);
}

public void countPollEndEvent(int status) {
  synchronized (pollEndEventCounters) {
    pollEndEventCounters[status]++;
  }
}

public int getEventCount(EventCtr c) {
  synchronized (eventCounters) {
    MutableInt n = eventCounters.get(c);
    return n == null ? 0 : n.intValue();
  }
}

public int getVoterNakEventCount(PollNak c) {
  synchronized (voterNakEventCounters) {
    MutableInt n = voterNakEventCounters.get(c);
    return n == null ? 0 : n.intValue();
  }
}

// NOTE(review): read is unsynchronized while countPollEndEvent
// synchronizes its increment — possibly stale reads; confirm intended.
public int getPollEndEventCount(int status) {
  return pollEndEventCounters[status];
}
}
src/org/lockss/poller/PollManager.java
/* * $Id: PollManager.java,v 1.243 2012-07-20 17:43:24 barry409 Exp $ */ /* Copyright (c) 2000-2012 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. 
*/ package org.lockss.poller; import static org.lockss.util.Constants.SECOND; import static org.lockss.util.Constants.MINUTE; import static org.lockss.util.Constants.HOUR; import static org.lockss.util.Constants.DAY; import static org.lockss.util.Constants.WEEK; import java.io.*; import java.util.*; import EDU.oswego.cs.dl.util.concurrent.*; import org.apache.commons.collections.map.*; import org.apache.commons.lang.builder.CompareToBuilder; import org.apache.commons.lang.mutable.MutableInt; import org.lockss.alert.*; import org.lockss.app.*; import org.lockss.config.*; import org.lockss.daemon.*; import org.lockss.daemon.status.StatusService; import org.lockss.hasher.HashService; import org.lockss.plugin.*; import org.lockss.poller.v3.*; import org.lockss.poller.v3.V3Serializer.PollSerializerException; import org.lockss.protocol.*; import org.lockss.protocol.psm.*; import org.lockss.protocol.V3LcapMessage.PollNak; import org.lockss.state.*; import org.lockss.util.*; import static org.lockss.poller.v3.V3Poller.*; import static org.lockss.poller.v3.V3Voter.*; import static org.lockss.poller.v3.V3PollFactory.*; /** * <p>Class that manages the polling process.</p> * @author Claire Griffin * @version 1.0 */ // CR: Code review comments are marked with CR: public class PollManager extends BaseLockssDaemonManager implements ConfigurableManager { protected static Logger theLog = Logger.getLogger("PollManager"); static final String PREFIX = Configuration.PREFIX + "poll."; static final String PARAM_RECENT_EXPIRATION = PREFIX + "expireRecent"; static final long DEFAULT_RECENT_EXPIRATION = DAY; /** If true, empty poll state directories found at startup will be * deleted. */ static final String PARAM_DELETE_INVALID_POLL_STATE_DIRS = PREFIX + "deleteInvalidPollStateDirs"; static final boolean DEFAULT_DELETE_INVALID_POLL_STATE_DIRS = true; /** If true, discard saved poll state at startup (i.e., don't restore * polls that were running before exit). 
*/ static final String PARAM_DISCARD_SAVED_POLLS = PREFIX + "discardSavedPolls"; static final boolean DEFAULT_DISCARD_SAVED_POLLS = false; public static final String PARAM_ENABLE_V3_POLLER = org.lockss.poller.v3.V3PollFactory.PARAM_ENABLE_V3_POLLER; public static final boolean DEFAULT_ENABLE_V3_POLLER = org.lockss.poller.v3.V3PollFactory.DEFAULT_ENABLE_V3_POLLER; public static final String PARAM_ENABLE_V3_VOTER = org.lockss.poller.v3.V3PollFactory.PARAM_ENABLE_V3_VOTER; public static final boolean DEFAULT_ENABLE_V3_VOTER = org.lockss.poller.v3.V3PollFactory.DEFAULT_ENABLE_V3_VOTER; /** The classes of AUs for which polls should be run. May be a singleton * or list of: * <dl> * <dt>All<dd> All AUs * <dt>Internal<dd> Internal AUs (plugin registries) * <dt>Priority<dd> Poll that have been requested from DebugPanel * </dl> */ public static final String PARAM_AUTO_POLL_AUS = PREFIX + "autoPollAuClassess"; public static final List<String> DEFAULT_AUTO_POLL_AUS = ListUtil.list("All"); // Poll starter public static final String PARAM_START_POLLS_INITIAL_DELAY = PREFIX + "pollStarterInitialDelay"; public static final long DEFAULT_START_POLLS_INITIAL_DELAY = MINUTE * 10; /** Minimum interval between poll attempts on an AU. This takes effect * even if the poll failed to start. */ public static final String PARAM_MIN_POLL_ATTEMPT_INTERVAL = PREFIX + "minPollAttemptInterval"; public static final long DEFAULT_MIN_POLL_ATTEMPT_INTERVAL = 4 * HOUR; /** The time, in ms, that will be added between launching new polls. * This time is added to the channel timeout time provided by SCOMM. 
*/ public static final String PARAM_ADDED_POLL_DELAY = PREFIX + "pollStarterAdditionalDelayBetweenPolls"; public static final long DEFAULT_ADDED_POLL_DELAY = SECOND; /** Max interval between recalculating poll queue order */ public static final String PARAM_REBUILD_POLL_QUEUE_INTERVAL = PREFIX + "queueRecalcInterval"; static final long DEFAULT_REBUILD_POLL_QUEUE_INTERVAL = HOUR; /** Interval to sleep when queue empty, before recalc. */ public static final String PARAM_QUEUE_EMPTY_SLEEP = PREFIX + "queueEmptySleep"; static final long DEFAULT_QUEUE_EMPTY_SLEEP = 30 * MINUTE; /** Interval to sleep when max number of pollers are active, before * checking again. */ public static final String PARAM_MAX_POLLERS_SLEEP = PREFIX + "maxPollersSleep"; static final long DEFAULT_MAX_POLLERS_SLEEP = 10 * MINUTE; /** Size of poll queue. */ public static final String PARAM_POLL_QUEUE_MAX = PREFIX + "pollQueueMax"; static final int DEFAULT_POLL_QUEUE_MAX = 20; /** * If set, poll starting will be throttled. This is the default. 
*/ public static final String PARAM_ENABLE_POLL_STARTER_THROTTLE = PREFIX + "enablePollStarterThrottle"; public static boolean DEFAULT_ENABLE_POLL_STARTER_THROTTLE = true; /** If true, state machines are run in their own thread */ public static final String PARAM_PSM_ASYNCH = PREFIX + "psmAsynch"; public static final boolean DEFAULT_PSM_ASYNCH = true; /** Interval after which we'll try inviting peers that we think are not * in our polling group */ public static final String PARAM_WRONG_GROUP_RETRY_TIME = PREFIX + "wrongGroupRetryTime"; public static final long DEFAULT_WRONG_GROUP_RETRY_TIME = 4 * WEEK; static final String V3PREFIX = PREFIX + "v3."; /** Curve expressing desired inter-poll interval based on last agreement * value */ public static final String PARAM_POLL_INTERVAL_AGREEMENT_CURVE = V3PREFIX + "pollIntervalAgreementCurve"; public static final String DEFAULT_POLL_INTERVAL_AGREEMENT_CURVE = null; /** Previous poll results for which we want to apply {@link * #PARAM_POLL_INTERVAL_AGREEMENT_CURVE} */ public static final String PARAM_POLL_INTERVAL_AGREEMENT_LAST_RESULT = V3PREFIX + "pollIntervalAgreementLastResult"; public static final List DEFAULT_POLL_INTERVAL_AGREEMENT_LAST_RESULT = Collections.EMPTY_LIST; /** Curve expressing desired inter-poll interval based on number of * at-risk instances of AU */ public static final String PARAM_POLL_INTERVAL_AT_RISK_PEERS_CURVE = V3PREFIX + "pollIntervalAtRiskPeersCurve"; public static final String DEFAULT_POLL_INTERVAL_AT_RISK_PEERS_CURVE = null; /** Curve expressing poll weight multiplier based on number of at-risk * instances of AU */ public static final String PARAM_POLL_WEIGHT_AT_RISK_PEERS_CURVE = V3PREFIX + "pollWeightAtRiskPeersCurve"; public static final String DEFAULT_POLL_WEIGHT_AT_RISK_PEERS_CURVE = null; /** Curve giving reset interval of NoAuPeerIdSet as a function of AU * age */ public static final String PARAM_NO_AU_RESET_INTERVAL_CURVE = V3PREFIX + "noAuResetIntervalCurve"; public static final String 
DEFAULT_NO_AU_RESET_INTERVAL_CURVE = "[1w,2d],[1w,7d],[30d,7d],[30d,30d],[100d,30d],[100d,50d]"; /** Target poll interval if no other mechanism is used */ public static final String PARAM_TOPLEVEL_POLL_INTERVAL = V3PREFIX + "toplevelPollInterval"; public static final long DEFAULT_TOPLEVEL_POLL_INTERVAL = 10 * WEEK; public static class AuPeersMap extends HashMap<String,Set<PeerIdentity>> {} public static class Peer2PeerMap extends HashMap<PeerIdentity,PeerIdentity> {} // Items are moved between thePolls and theRecentPolls, so it's simplest // to synchronize all accesses on a single object, pollMapLock. private static HashMap<String,PollManagerEntry> thePolls = new HashMap(); private static FixedTimedMap theRecentPolls = new FixedTimedMap(DEFAULT_RECENT_EXPIRATION); private static Object pollMapLock = thePolls; private static PollManager theManager = null; private static LcapRouter.MessageHandler m_msgHandler; private static IdentityManager theIDManager; private static HashService theHashService; private static LcapRouter theRouter = null; private AlertManager theAlertManager = null; private PluginManager pluginMgr = null; private static SystemMetrics theSystemMetrics = null; private AuEventHandler auEventHandler; // CR: serializedPollers and serializedVoters s.b. 
updated as new // polls/votes are created, in case AU is deactivated & reactivated private HashMap serializedPollers; private HashMap serializedVoters; private V3PollStatusAccessor v3Status; private boolean deleteInvalidPollStateDirs = DEFAULT_DELETE_INVALID_POLL_STATE_DIRS; private long paramToplevelPollInterval = DEFAULT_TOPLEVEL_POLL_INTERVAL; private long pollStartInitialDelay = DEFAULT_START_POLLS_INITIAL_DELAY; private boolean enableV3Poller = DEFAULT_ENABLE_V3_POLLER; private int maxSimultaneousPollers = DEFAULT_MAX_SIMULTANEOUS_V3_POLLERS; private PollStarter pollStarter; private boolean isPollStarterEnabled = false; private boolean enablePollStarterThrottle = DEFAULT_ENABLE_POLL_STARTER_THROTTLE; private long paramRebuildPollQueueInterval = DEFAULT_REBUILD_POLL_QUEUE_INTERVAL; private long paramQueueEmptySleep = DEFAULT_QUEUE_EMPTY_SLEEP; private long paramMaxPollersSleep = DEFAULT_MAX_POLLERS_SLEEP; private int paramPollQueueMax = DEFAULT_POLL_QUEUE_MAX; private long interPollStartDelay = DEFAULT_ADDED_POLL_DELAY; private long paramMinPollAttemptInterval = DEFAULT_MIN_POLL_ATTEMPT_INTERVAL; private double paramMinPercentForRepair = V3Voter.DEFAULT_MIN_PERCENT_AGREEMENT_FOR_REPAIRS; private boolean paramDiscardSavedPolls = DEFAULT_DISCARD_SAVED_POLLS; private boolean isAsynch = DEFAULT_PSM_ASYNCH; private long wrongGroupRetryTime = DEFAULT_WRONG_GROUP_RETRY_TIME; private IpFilter noInvitationSubnetFilter = null; private CompoundLinearSlope v3InvitationWeightAgeCurve = null; // private CompoundLinearSlope v3InvitationWeightSafetyCurve = null; private CompoundLinearSlope v3AcceptProbabilitySafetyCurve = null; private CompoundLinearSlope v3NominationWeightAgeCurve = null; private CompoundLinearSlope pollIntervalAgreementCurve = null; private CompoundLinearSlope pollIntervalAtRiskPeersCurve = null; private CompoundLinearSlope pollWeightAtRiskPeersCurve = null; private Set pollIntervalAgreementLastResult = 
SetUtil.theSet(DEFAULT_POLL_INTERVAL_AGREEMENT_LAST_RESULT);

  // Cached configuration values; each is initialized from the
  // corresponding DEFAULT_* constant.
  private long paramWillingRepairerLiveness =
    DEFAULT_WILLING_REPAIRER_LIVENESS;
  private double paramAcceptRepairersPollPercent =
    DEFAULT_ACCEPT_REPAIRERS_POLL_PERCENT;
  private double paramInvitationWeightAtRisk =
    DEFAULT_INVITATION_WEIGHT_AT_RISK;
  private double paramInvitationWeightAlreadyRepairable =
    DEFAULT_INVITATION_WEIGHT_ALREADY_REPAIRABLE;
  private CompoundLinearSlope v3NoAuResetIntervalCurve = null;
  private CompoundLinearSlope v3VoteRetryIntervalDurationCurve = null;
  // Peer-to-peer map built in startService() from
  // PARAM_REPUTATION_TRANSFER_MAP.
  private Peer2PeerMap reputationTransferMap;
  // AU id -> set of at-risk peers, built in startService() from
  // PARAM_AT_RISK_AU_INSTANCES.
  private AuPeersMap atRiskAuInstances = null;

  // If true, restore V3 Pollers
  private boolean enablePollers = DEFAULT_ENABLE_V3_POLLER;

  // If true, restore V3 Voters
  private boolean enableVoters = DEFAULT_ENABLE_V3_VOTER;

  // Classes of AUs for which polls are started automatically; see
  // PARAM_AUTO_POLL_AUS.  (Identifier spelling "Classess" is historical.)
  private List<String> autoPollAuClassess = DEFAULT_AUTO_POLL_AUS;

  // Executor used to carry out serialized poll operations.
  // Implementations include a queued poll executor and a pooled poll executor.
  private PollRunner theTaskRunner;

  // our configuration variables
  protected long m_recentPollExpireTime = DEFAULT_RECENT_EXPIRATION;

  Deadline startOneWait = Deadline.in(0);

  /**
   * A request to poll an ArchivalUnit, with an optional priority and
   * an optional explicit PollSpec.  The setters are fluent (they
   * return this).
   */
  static class PollReq {
    ArchivalUnit au;
    // Default priority is 0; anything greater counts as high priority.
    int priority = 0;
    PollSpec spec;

    public PollReq(ArchivalUnit au) {
      this.au = au;
    }

    public PollReq setPriority(int val) {
      priority = val;
      return this;
    }

    public PollReq setPollSpec(PollSpec spec) {
      this.spec = spec;
      return this;
    }

    public ArchivalUnit getAu() {
      return au;
    }

    public int getPriority() {
      return priority;
    }

    /** @return true iff this request's priority is greater than zero. */
    public boolean isHighPriority() {
      return priority > 0;
    }

    public String toString() {
      return "[PollReq: " + au + ", pri: " + priority + "]";
    }
  }

  /**
   * Keep an ordered list of PollReq.  High-priority requests will
   * always be done first, followed by requests generated by the
   * PollManager itself.
   */
  class PollQueue {
    // When this Deadline expires the queue must be rebuilt.
    Deadline timeToRebuildPollQueue = Deadline.in(0);
    Object queueLock = new Object();      // lock for pollQueue
    /**
     * The processed list of poll requests, in the order they will be executed.
     */
    private List<PollReq> pollQueue = new ArrayList<PollReq>();
    /**
     * The high-priority pending requests, keyed by AU.
     */
    private Map<ArchivalUnit,PollReq> highPriorityPollRequests =
      Collections.synchronizedMap(new ListOrderedMap());

    /**
     * Make a PollQueue.
     */
    PollQueue() {
    }

    /**
     * @return the next PollReq to start, or null if none is available.
     */
    public PollReq nextReq() throws InterruptedException {
      boolean rebuilt = rebuildPollQueueIfNeeded();
      PollReq req = nextReqFromBuiltQueue();
      if (req != null) {
        return req;
      }
      // Built queue was empty; if we didn't just rebuild it, rebuild
      // now and try once more.
      if (!rebuilt) {
        rebuildPollQueue();
      }
      return nextReqFromBuiltQueue();
    }

    /**
     * Request a poll, as specified by the PollReq, sooner than might
     * otherwise happen.  Overrides any previous requests.
     * @param req the PollReq
     */
    public void enqueueHighPriorityPoll(PollReq req) {
      highPriorityPollRequests.put(req.au, req);
      needRebuildPollQueue();
    }

    /**
     * Remove any requests previously registered using
     * enqueueHighPriorityPoll.
     * @param au the ArchivalUnit
     */
    public void cancelAuPolls(ArchivalUnit au) {
      // todo(bhayes): This doesn't force a rebuild? Why not?
      highPriorityPollRequests.remove(au);
    }

    /**
     * Invalidate the current list of pending polls.
     */
    public void needRebuildPollQueue() {
      timeToRebuildPollQueue.expire();
    }

    /**
     * @return a List of ArchivalUnits in the queue to poll.
     */
    public List<ArchivalUnit> getPendingQueueAus() {
      rebuildPollQueueIfNeeded();
      ArrayList<ArchivalUnit> aus = new ArrayList<ArchivalUnit>();
      synchronized (queueLock) {
        for (PollReq req : pollQueue) {
          aus.add(req.getAu());
        }
      }
      return aus;
    }

    /**
     * Pop the next PollReq from the queue, and return it.
     */
    private PollReq nextReqFromBuiltQueue() {
      synchronized (queueLock) {
        if (theLog.isDebug3()) {
          theLog.debug3("nextReqFromBuiltQueue(), " +
                        pollQueue.size() + " in queue");
        }
        // Skip over (and discard) requests whose AU is no longer active.
        while (!pollQueue.isEmpty()) {
          PollReq req = pollQueue.remove(0);
          // todo(bhayes): Why is this check here, rather than in
          // startOnePoll()? Also, if this is in the high priority
          // list, what keeps it from getting back in the rebuilt
          // list?
          // ignore deleted AUs
          if (pluginMgr.isActiveAu(req.getAu())) {
            return req;
          }
        }
        if (theLog.isDebug3()) {
          theLog.debug3("nextReqFromBuiltQueue(): null");
        }
        return null;
      }
    }

    /**
     * Rebuild the poll queue, if needed.
     * @return true iff the queue was rebuilt.
     */
    private boolean rebuildPollQueueIfNeeded() {
      synchronized (queueLock) {
        if (timeToRebuildPollQueue.expired()) {
          rebuildPollQueue();
          return true;
        }
        return false;
      }
    }

    // package level for testing; todo(bhayes): The tests should
    // probably be calling needRebuildPollQueue and using the other
    // public methods.
    /**
     * Force poll queue to be rebuilt from the pending high-priority
     * requests and the PollManager's list of ArchivalUnits.
     */
    void rebuildPollQueue() {
      // Schedule the next periodic rebuild before performing this one.
      timeToRebuildPollQueue.expireIn(paramRebuildPollQueueInterval);
      long startTime = TimeBase.nowMs();
      rebuildPollQueue0();
      theLog.debug("rebuildPollQueue(): "+
                   (TimeBase.nowMs() - startTime)+"ms");
    }

    /**
     * REALLY force poll queue to be rebuilt from the pending
     * high-priority requests and the PollManager's list of
     * ArchivalUnits.
     */
    private void rebuildPollQueue0() {
      synchronized (queueLock) {
        pollQueue.clear();

        // XXX Until have real priority system, just add these in the
        // order they were created.
Set<ArchivalUnit> highPriorityAus = new HashSet<ArchivalUnit>(); synchronized (highPriorityPollRequests) { for (PollReq req : highPriorityPollRequests.values()) { highPriorityAus.add(req.au); if (isEligibleForPoll(req)) { pollQueue.add(req); } } } int availablePollCount = paramPollQueueMax - pollQueue.size(); if (availablePollCount > 0) { Map<ArchivalUnit, Double> weightMap = new HashMap<ArchivalUnit, Double>(); for (ArchivalUnit au : pluginMgr.getAllAus()) { try { if (highPriorityAus.contains(au)) { // already tried above; might or might not have been added. continue; } try { double weight = pollWeight(au); if (weight > 0.0) { weightMap.put(au, weight); } } catch (NotEligibleException e) { if (theLog.isDebug3()) { theLog.debug3("Not eligible for poll: " + au); } } } catch (RuntimeException e) { theLog.warning("Checking for pollworthiness: " + au.getName(), e); // ignore AU if it caused an error } } // weightedRandomSelection throws if the count is larger // than the size. int count = Math.min(weightMap.size(), availablePollCount); if (!weightMap.isEmpty()) { List<ArchivalUnit> selected = weightedRandomSelection(weightMap, count); for (ArchivalUnit au : selected) { PollSpec spec = new PollSpec(au.getAuCachedUrlSet(), Poll.V3_POLL); PollReq req = new PollReq(au).setPollSpec(spec); pollQueue.add(req); } } } if (theLog.isDebug()) { theLog.debug("Poll queue: " + pollQueue); } } } } /** * The poll queue for ordering poll requests. */ protected PollQueue pollQueue = new PollQueue(); // The PollFactory instances PollFactory [] pf = { null, new V1PollFactory(), null, // new V2PollFactory(), new V3PollFactory(this), }; public PollManager() { } /** * start the poll manager. * @see org.lockss.app.LockssManager#startService() */ public void startService() { super.startService(); // Create a poll runner. 
theTaskRunner = new PollRunner(); // the services we use on an ongoing basis LockssDaemon theDaemon = getDaemon(); theIDManager = theDaemon.getIdentityManager(); theHashService = theDaemon.getHashService(); theAlertManager = theDaemon.getAlertManager(); pluginMgr = theDaemon.getPluginManager(); Configuration config = ConfigManager.getCurrentConfig(); if (config.containsKey(PARAM_AT_RISK_AU_INSTANCES)) { atRiskAuInstances = makeAuPeersMap(config.getList(PARAM_AT_RISK_AU_INSTANCES), theIDManager); } if (config.containsKey(PARAM_REPUTATION_TRANSFER_MAP)) { reputationTransferMap = makeReputationPeerMap(config.getList(PARAM_REPUTATION_TRANSFER_MAP), theIDManager); } // register a message handler with the router theRouter = theDaemon.getRouterManager(); m_msgHandler = new RouterMessageHandler(); theRouter.registerMessageHandler(m_msgHandler); // get System Metrics theSystemMetrics = theDaemon.getSystemMetrics(); // register our status StatusService statusServ = theDaemon.getStatusService(); statusServ.registerStatusAccessor(V3PollStatus.POLLER_STATUS_TABLE_NAME, new V3PollStatus.V3PollerStatus(this)); statusServ.registerOverviewAccessor(V3PollStatus.POLLER_STATUS_TABLE_NAME, new V3PollStatus.PollOverview(this)); statusServ.registerStatusAccessor(V3PollStatus.VOTER_STATUS_TABLE_NAME, new V3PollStatus.V3VoterStatus(this)); statusServ.registerOverviewAccessor(V3PollStatus.VOTER_STATUS_TABLE_NAME, new V3PollStatus.VoterOverview(this)); statusServ.registerStatusAccessor(V3PollStatus.POLLER_DETAIL_TABLE_NAME, new V3PollStatus.V3PollerStatusDetail(this)); statusServ.registerStatusAccessor(V3PollStatus.VOTER_DETAIL_TABLE_NAME, new V3PollStatus.V3VoterStatusDetail(this)); statusServ.registerStatusAccessor(V3PollStatus.ACTIVE_REPAIRS_TABLE_NAME, new V3PollStatus.V3ActiveRepairs(this)); statusServ.registerStatusAccessor(V3PollStatus.COMPLETED_REPAIRS_TABLE_NAME, new V3PollStatus.V3CompletedRepairs(this)); statusServ.registerStatusAccessor(V3PollStatus.NO_QUORUM_TABLE_NAME, new 
V3PollStatus.V3NoQuorumURLs(this)); statusServ.registerStatusAccessor(V3PollStatus.TOO_CLOSE_TABLE_NAME, new V3PollStatus.V3TooCloseURLs(this)); statusServ.registerStatusAccessor(V3PollStatus.AGREE_TABLE_NAME, new V3PollStatus.V3AgreeURLs(this)); statusServ.registerStatusAccessor(V3PollStatus.DISAGREE_TABLE_NAME, new V3PollStatus.V3DisagreeURLs(this)); statusServ.registerStatusAccessor(V3PollStatus.ERROR_TABLE_NAME, new V3PollStatus.V3ErrorURLs(this)); // register our AU event handler auEventHandler = new AuEventHandler.Base() { @Override public void auCreated(PluginManager.AuEvent event, ArchivalUnit au) { restoreAuPolls(au); } @Override public void auDeleted(PluginManager.AuEvent event, ArchivalUnit au) { cancelAuPolls(au); }}; pluginMgr.registerAuEventHandler(auEventHandler); // Maintain the state of V3 polls, since these do not use the V1 per-node // history mechanism. v3Status = new V3PollStatusAccessor(); // One time load of an in-memory map of AU IDs to directories. preloadStoredPolls(); // Enable the poll starter. enablePollStarter(); } private void enablePollStarter() { theLog.info("Starting PollStarter"); if (pollStarter != null) { theLog.debug("PollStarter already running. 
" + "Stopping old one first"); disablePollStarter(); } pollStarter = new PollStarter(getDaemon(), this); new Thread(pollStarter).start(); isPollStarterEnabled = true; } private void disablePollStarter() { if (pollStarter != null) { theLog.info("Stopping PollStarter"); pollStarter.stopPollStarter(); pollStarter.waitExited(Deadline.in(SECOND)); pollStarter = null; } isPollStarterEnabled = false; } /** * stop the poll manager * @see org.lockss.app.LockssManager#stopService() */ public void stopService() { disablePollStarter(); if (auEventHandler != null) { getDaemon().getPluginManager().unregisterAuEventHandler(auEventHandler); auEventHandler = null; } // unregister our status StatusService statusServ = getDaemon().getStatusService(); statusServ.unregisterStatusAccessor(V3PollStatus.POLLER_STATUS_TABLE_NAME); statusServ.unregisterOverviewAccessor(V3PollStatus.POLLER_STATUS_TABLE_NAME); statusServ.unregisterStatusAccessor(V3PollStatus.VOTER_STATUS_TABLE_NAME); statusServ.unregisterOverviewAccessor(V3PollStatus.VOTER_STATUS_TABLE_NAME); statusServ.unregisterStatusAccessor(V3PollStatus.POLLER_DETAIL_TABLE_NAME); statusServ.unregisterStatusAccessor(V3PollStatus.VOTER_DETAIL_TABLE_NAME); statusServ.unregisterStatusAccessor(V3PollStatus.ACTIVE_REPAIRS_TABLE_NAME); statusServ.unregisterStatusAccessor(V3PollStatus.COMPLETED_REPAIRS_TABLE_NAME); statusServ.unregisterStatusAccessor(V3PollStatus.NO_QUORUM_TABLE_NAME); statusServ.unregisterStatusAccessor(V3PollStatus.TOO_CLOSE_TABLE_NAME); statusServ.unregisterStatusAccessor(V3PollStatus.AGREE_TABLE_NAME); statusServ.unregisterStatusAccessor(V3PollStatus.DISAGREE_TABLE_NAME); statusServ.unregisterStatusAccessor(V3PollStatus.ERROR_TABLE_NAME); // unregister our router theRouter.unregisterMessageHandler(m_msgHandler); // Stop the poll runner. 
    if (theTaskRunner != null) {
      theTaskRunner.stop();
    }
    // null anything which might cause problems
    theTaskRunner = null;
    theIDManager = null;
    theHashService = null;
    theSystemMetrics = null;
    synchronized (pollMapLock) {
      thePolls.clear();
      theRecentPolls.clear();
    }
    v3Status.clear();
    super.stopService();
  }

  /** Cancel all polls on the specified AU.
   * @param au the AU
   */
  void cancelAuPolls(ArchivalUnit au) {
    // first remove from queues
    pollQueue.cancelAuPolls(au);
    // collect polls to cancel: all non-completed polls whose CUS
    // belongs to this AU
    Set<PollManagerEntry> toCancel = new HashSet();
    synchronized (pollMapLock) {
      for (PollManagerEntry pme : thePolls.values()) {
        ArchivalUnit pau = pme.poll.getCachedUrlSet().getArchivalUnit();
        if (pau == au && !pme.isPollCompleted()) {
          toCancel.add(pme);
        }
      }
    }
    // then actually cancel them while not holding lock
    for (PollManagerEntry pme : toCancel) {
      ArchivalUnit pau = pme.poll.getCachedUrlSet().getArchivalUnit();
      theHashService.cancelAuHashes(pau);
      pme.poll.abortPoll();
    }
  }

  /**
   * Call a poll.  Used by NodeManagerImpl; V1 only.
   * @param pollspec the <code>PollSpec</code> that defines the subject of
   * the <code>Poll</code>.
   * @return the poll, if it was successfully called, else null.
   * @throws IllegalArgumentException if the spec is not protocol version 1.
   */
  public Poll callPoll(PollSpec pollspec) {
    if (pollspec.getProtocolVersion() != 1) {
      throw new IllegalArgumentException("V1 method called with: "+pollspec);
    }
    return callPoll0(pollspec);
  }

  /**
   * Call a poll.  Used by PollStarter.
   * @param pollspec the <code>PollSpec</code> that defines the subject of
   * the <code>Poll</code>.
   * @param au the ArchivalUnit to poll; its AuState is updated to record
   * the poll attempt.
   * @return the poll, if it was successfully called, else null.
   */
  public Poll callPoll(ArchivalUnit au, PollSpec pollspec) {
    // Record the poll attempt time even if the poll fails to start.
    AuState auState = AuUtil.getAuState(au);
    auState.pollAttempted();
    return callPoll0(pollspec);
  }

  /**
   * Common implementation for both callPoll() variants: compute a
   * poll duration, then make and return the poller.
   * @param pollspec defines the subject of the poll.
   * @return the poll, or null if it could not be started.
   */
  private Poll callPoll0(PollSpec pollspec) {
    String errMsg = null;
    PollFactory pollFact = getPollFactory(pollspec);
    if (pollFact != null) {
      long duration = pollFact.calcDuration(pollspec, this);
      if (duration > 0) {
        try {
          PeerIdentity orig =
            theIDManager.getLocalPeerIdentity(pollspec.getProtocolVersion());
          BasePoll thePoll = makePoller(pollspec, duration, orig);
          if (thePoll != null) {
            return thePoll;
          } else {
            theLog.debug("makePoller(" + pollspec + ") returned null");
          }
        } catch (ProtocolException ex) {
          theLog.debug("Error in makePoller or callPoll", ex);
        }
      } else {
        // calcDuration() returned <= 0: can't schedule the hash now.
        errMsg = "Too busy";
        theLog.debug("No duration within limit");
      }
    } else {
      errMsg = "Unknown poll version: " + pollspec.getProtocolVersion();
    }
    theLog.debug("Poll not started: " + errMsg + ", au: " + pollspec.getAuId());
    return null;
  }

  /**
   * Is a poll of the given type and spec currently running
   * @param spec the PollSpec defining the location of the poll.
   * @return true if we have a poll which is running that matches pollspec
   *
   * @deprecated This method may be removed in a future release.
   */
  public boolean isPollRunning(PollSpec spec) {
    synchronized (pollMapLock) {
      for (PollManagerEntry pme : thePolls.values()) {
        if (pme.isSamePoll(spec)) {
          return !pme.isPollCompleted();
        }
      }
    }
    return false;
  }

  /**
   * @param au the ArchivalUnit to check
   * @return true iff an active V3 poll is running on the given AU.
   * @throws NullPointerException if au is null or has a null ID.
   */
  public boolean isPollRunning(ArchivalUnit au) {
    if (au == null || au.getAuId() == null) {
      throw new NullPointerException("Passed a null AU or AU with null ID " +
                                     "to isPollRunning!");
    }
    synchronized (pollMapLock) {
      for (PollManagerEntry pme : thePolls.values()) {
        if (au.getAuId().equals(pme.getPollSpec().getAuId())) {
          // Keep looking until we find a V3Poller that is active, or
          // we run out of poll objects to examine.  If we find an active
          // poller, return right away.
          if (pme.getPoll() instanceof V3Poller) {
            if (pme.isPollActive()) {
              return true;
            }
          }
        }
      }
    }
    return false;
  }

  /** Return the PollManagerEntry for the poll with the specified key. */
  private PollManagerEntry getPollManagerEntry(String key) {
    synchronized (pollMapLock) {
      return thePolls.get(key);
    }
  }

  // Used in PollerStatus.getSummary, which is V1 code.
  /** Find the poll either in current or recent polls.
   * @throws IllegalStateException if the entry found is a V3 poll. */
  PollManagerEntry getCurrentOrRecentV1PollEntry(String key) {
    synchronized (pollMapLock) {
      PollManagerEntry pme = thePolls.get(key);
      if (pme == null) {
        pme = (PollManagerEntry)theRecentPolls.get(key);
      }
      if (pme != null && pme.isV3Poll()) {
        throw new IllegalStateException("Expected V1Poll: "+key);
      }
      return pme;
    }
  }

  /** Find the poll either in current or recent polls.
   * @throws IllegalStateException if the entry found is not a V3 poll. */
  private PollManagerEntry getCurrentOrRecentV3PollEntry(String key) {
    synchronized (pollMapLock) {
      PollManagerEntry pme = thePolls.get(key);
      if (pme == null) {
        pme = (PollManagerEntry)theRecentPolls.get(key);
      }
      if (pme != null && !pme.isV3Poll()) {
        throw new IllegalStateException("Expected V3Poll: "+key);
      }
      return pme;
    }
  }

  // XXX: V3 -- Only required for V1 polls.
  /** Detach and return the activity lock from the tally of the poll
   * with the given key, or null if there is no such poll or tally. */
  public ActivityRegulator.Lock acquirePollLock(String key) {
    ActivityRegulator.Lock lock = null;
    PollManagerEntry pme = getCurrentOrRecentV1PollEntry(key);
    if(pme != null) {
      PollTally tally = pme.poll.getVoteTally();
      if(tally != null) {
        lock = tally.getActivityLock();
        tally.setActivityLock(null);
      }
    }
    return lock;
  }

  /**
   * suspend a poll while we wait for a repair
   * @param key the identifier key of the poll to suspend
   */
  // XXX: V3 -- Only required for V1 polls.
public void suspendPoll(String key) {
  PollManagerEntry pme;
  synchronized (pollMapLock) {
    pme = getCurrentOrRecentV1PollEntry(key);
    if (pme != null) {
      // Move the entry from the recent-polls map back to the active map
      // and mark it suspended until resumePoll() is called.
      theRecentPolls.remove(key);
      thePolls.put(key, pme);
      pme.setPollSuspended();
    }
  }
  if (pme == null) {
    theLog.debug2("ignoring suspend request for unknown key " + key);
  } else {
    theLog.debug("suspended poll " + key);
  }
}

/**
 * resume a poll that had been suspended for a repair and check the repair
 * @param replayNeeded true we now need to replay the poll results
 * @param key the key of the suspended poll
 * @param lock the activity lock to reattach to the poll's tally
 */
// XXX: V3 -- Only required for V1 polls.
public void resumePoll(boolean replayNeeded,
                       String key,
                       ActivityRegulator.Lock lock) {
  PollManagerEntry pme = getPollManagerEntry(key);
  if(pme == null) {
    theLog.debug2("ignoring resume request for unknown key " + key);
    return;
  }
  theLog.debug("resuming poll " + key);
  PollTally tally = pme.getPoll().getVoteTally();
  tally.setActivityLock(lock);
  long expiration = 0;
  Deadline d;
  NodeManager nm = getDaemon().getNodeManager(tally.getArchivalUnit());
  nm.startPoll(tally.getCachedUrlSet(), tally, true);
  if (replayNeeded) {
    theLog.debug2("starting replay of poll " + key);
    PollFactory pollFact = getPollFactory(pme.poll.getVersion());
    // should be equivalent to this. is it?
    // PollFactory pollFact = getPollFactory(pme.spec);
    if (pollFact != null) {
      expiration = pollFact.getMaxPollDuration(Poll.V1_CONTENT_POLL);
    } else {
      expiration = 0; // XXX
    }
    d = Deadline.in(expiration);
    tally.startReplay(d);
  } else {
    // No replay required; the suspended poll is simply stopped.
    pme.poll.stopPoll();
  }
  theLog.debug3("completed resume poll " + (String) key);
}

/**
 * handle an incoming message packet. This will create a poll if
 * one is not already running. It will then call receiveMessage on
 * the poll. This was moved from node state which kept track of the polls
 * running in the node. This will need to be moved or amended to support this.
 * @param msg the message used to generate the poll
 * @throws IOException thrown if the poll was unsuccessfully created
 */
void handleIncomingMessage(LcapMessage msg) throws IOException {
  if (theLog.isDebug2()) theLog.debug2("Got a message: " + msg);
  PollFactory fact = getPollFactory(msg);
  if(fact.isDuplicateMessage(msg, this)) {
    theLog.debug3("Dropping duplicate message:" + msg);
    return;
  }
  String key = msg.getKey();
  PollManagerEntry pme;
  if (msg instanceof V1LcapMessage) {
    // Needed for TestPollManager.testCloseThePoll to pass.
    pme = getCurrentOrRecentV1PollEntry(key);
  } else {
    pme = getPollManagerEntry(key);
  }
  if(pme != null) {
    if(pme.isPollCompleted() || pme.isPollSuspended()) {
      theLog.debug("Message received after poll was closed." + msg);
      return;
    }
  }
  BasePoll p = findPoll(msg);
  if (p != null) {
    p.setMessage(msg);
    p.receiveMessage(msg);
  }
}

/**
 * Find the poll defined by the <code>Message</code>.  If the poll
 * does not exist this will create a new poll (iff there are no conflicts)
 * @param msg <code>Message</code>
 * @return <code>Poll</code> which matches the message opcode, or a new
 * poll, or null if the new poll would conflict with a currently running poll.
 * @throws IOException if message opcode is unknown.
 */
synchronized BasePoll findPoll(LcapMessage msg) throws IOException {
  String key = msg.getKey();
  BasePoll ret = null;
  PollManagerEntry pme = getPollManagerEntry(key);
  if (pme == null) {
    theLog.debug3("findPoll: Making new poll: " + key);
    ret = makePoll(msg);
    if (theLog.isDebug3()) {
      if (ret != null) {
        theLog.debug3("findPoll: Made new poll: " + key);
      } else {
        theLog.debug3("findPoll: Did not make new poll: " + key);
      }
    }
  } else {
    theLog.debug3("findPoll: Returning existing poll: " + key);
    ret = pme.poll;
  }
  return ret;
}

/**
 * make a new poll of the type and version defined by the incoming message.
 * @param msg <code>Message</code> to use for
 * @return a new Poll object of the required type, or null if we don't
 * want to run this poll now (<i>ie</i>, due to a conflict with another
 * poll).
 * @throws ProtocolException if message opcode is unknown
 */
BasePoll makePoll(LcapMessage msg) throws ProtocolException {
  theLog.debug2("makePoll: From message: " + msg);
  // XXX: V3 Refactor - this could be cleaned up
  // Dispatch on the type of the msg.
  if (msg instanceof V1LcapMessage) {
    return makeV1Poll((V1LcapMessage)msg);
  } else if (msg instanceof V3LcapMessage) {
    return makeV3Voter((V3LcapMessage)msg);
  } else {
    throw new ProtocolException("Unexpected LCAP Message type.");
  }
}

/**
 * Make a V3Voter.
 * @param msg the V3 poll message inviting this peer to vote
 * @return the new V3Voter, or null if the factory declined
 * @throws ProtocolException if the factory made a poll of another kind
 */
private BasePoll makeV3Voter(V3LcapMessage msg) throws ProtocolException {
  PollSpec spec = new PollSpec(msg);
  long duration = msg.getDuration();
  PeerIdentity orig = msg.getOriginatorId();
  String hashAlg = msg.getHashAlgorithm();
  theLog.debug("Making V3Voter from: " + spec);
  PollFactory pollFact = getPollFactory(spec);
  BasePoll poll = pollFact.createPoll(spec, getDaemon(), orig,
                                      duration, hashAlg, msg);
  if (poll != null && !(poll instanceof V3Voter)) {
    throw new ProtocolException("msg "+msg+
                                " made unexpected kind of poll: "+poll);
  }
  processNewPoll(poll, msg);
  return poll;
}

/**
 * V1 for testing only.
 * @param msg the V1 poll message
 * @return the new V1 poll, or null if the request is rejected
 * @throws ProtocolException on factory errors
 */
private BasePoll makeV1Poll(V1LcapMessage msg) throws ProtocolException {
  PollSpec spec = new PollSpec(msg);
  long duration = msg.getDuration();
  PeerIdentity orig = msg.getOriginatorId();
  String hashAlg = msg.getHashAlgorithm();
  CachedUrlSet cus = spec.getCachedUrlSet();
  // check for presence of item in the cache
  if (cus == null) {
    theLog.debug2("Ignoring poll request, don't have AU: " + spec.getAuId());
    return null;
  }
  ArchivalUnit au = cus.getArchivalUnit();
  if (!spec.getPluginVersion().equals(AuUtil.getPollVersion(au))) {
    theLog.debug("Ignoring poll request for " + au.getName() +
                 " from peer " + msg.getOriginatorId() +
                 ". plugin version mismatch; have: " +
                 AuUtil.getPollVersion(au) +
                 ", need: " + spec.getPluginVersion());
    return null;
  }
  theLog.debug("Making poll from: " + spec);
  PollFactory pollFact = getPollFactory(spec);
  BasePoll poll = pollFact.createPoll(spec, getDaemon(), orig,
                                      duration, hashAlg, msg);
  processNewPoll(poll, msg);
  return poll;
}

// Creates a locally originated poll (no incoming message); a V3 PollSpec
// passed with a null message causes V3PollFactory to create a V3Poller.
private BasePoll makePoller(PollSpec spec,
                            long duration,
                            PeerIdentity orig) throws ProtocolException {
  theLog.debug("Making poll from: " + spec);
  // If this is a V3 PollSpec, passing null to V3PollFactory will
  // create a V3Poller
  PollFactory pollFact = getPollFactory(spec);
  String hashAlg = LcapMessage.getDefaultHashAlgorithm();
  BasePoll poll = pollFact.createPoll(spec, getDaemon(), orig,
                                      duration, hashAlg, null);
  processNewPoll(poll, null);
  return poll;
}

/**
 * If poll is not null, do what needs to be done to new polls:
 * register it in the active-polls map and start it.
 */
private void processNewPoll(BasePoll poll, LcapMessage msg) {
  if (poll != null) {
    poll.setMessage(msg);
    synchronized (pollMapLock) {
      thePolls.put(poll.getKey(), new PollManagerEntry(poll));
    }
    poll.startPoll();
  }
}

/**
 * close the poll from any further voting
 * @param key the poll signature
 */
public void closeThePoll(String key) {
  PollManagerEntry pme;
  synchronized (pollMapLock) {
    pme = thePolls.remove(key);
  }
  if (pme == null || pme.poll == null) {
    theLog.warning("Attempt to close unknown poll : " + key);
    return;
  }
  // mark the poll completed because if we need to call a repair poll
  // we don't want this one to be in conflict with it.
  // PollTally tally = pme.poll.getVoteTally();
  BasePoll p = pme.getPoll();
  pme.setPollCompleted();
  synchronized (pollMapLock) {
    theRecentPolls.put(key, pme);
  }
  try {
    theIDManager.storeIdentities();
  } catch (ProtocolException ex) {
    theLog.error("Unable to write Identity DB file.");
  }
  NodeManager nm = getDaemon().getNodeManager(p.getAu());
  // XXX: This is hacked up, admittedly.  The entire NodeManager
  // and repository are getting overhauled anyway, so it makes
  // no sense to do the "right" thing here by integrating this
  // into the NodeManager somehow.
  if (p.getType() == Poll.V3_POLL) {
    // Retrieve the node state for the top-level AU
    NodeStateImpl ns = (NodeStateImpl)nm.getNodeState(p.getCachedUrlSet());
    if (ns != null) ns.closeV3Poll(p.getKey());
  }
  // XXX: V3 -- Only required for V1 polls.
  //
  // Don't tell the node manager about verify polls
  // If closing a name poll that started ranged subpolls, don't tell
  // the node manager about it until all ranged subpolls have finished
  if ((p.getType() == Poll.V1_NAME_POLL ||
       p.getType() == Poll.V1_CONTENT_POLL) &&
      !p.isSubpollRunning()) {
    V1PollTally tally = (V1PollTally)p.getVoteTally();
    // if closing last name poll, concatenate all the name lists into the
    // first tally and pass that to node manager
    if (p.getType() == Poll.V1_NAME_POLL) {
      V1PollTally lastTally = (V1PollTally)tally;
      tally = lastTally.concatenateNameSubPollLists();
    }
    theLog.debug("handing poll results to node manager: " + tally);
    nm.updatePollResults(p.getCachedUrlSet(), tally);
    // free the activity lock
    ActivityRegulator.Lock lock = tally.getActivityLock();
    if(lock != null) {
      lock.expire();
    }
  }
}

/**
 * getActivePollSpecIterator returns an Iterator over the set of
 * PollSpec instances which currently have active polls on the given au.
 * @return Iterator over set of PollSpec
 */
protected Iterator getActivePollSpecIterator(ArchivalUnit au,
                                             BasePoll dontIncludePoll) {
  Set pollspecs = new HashSet();
  synchronized (pollMapLock) {
    for (PollManagerEntry pme : thePolls.values()) {
      ArchivalUnit pau = pme.poll.getCachedUrlSet().getArchivalUnit();
      if (pau == au &&
          pme.poll != dontIncludePoll &&
          !pme.isPollCompleted()) {
        pollspecs.add(pme.poll.getPollSpec());
      }
    }
  }
  return (pollspecs.iterator());
}

/** Raise an alert through the alert manager. */
public void raiseAlert(Alert alert) {
  theAlertManager.raiseAlert(alert);
}

/** Raise an alert with an explanatory message through the alert manager. */
public void raiseAlert(Alert alert, String msg) {
  theAlertManager.raiseAlert(alert, msg);
}

/**
 * Ask that the specified poll runner task be executed.
 */
public void runTask(PollRunner.Task task) {
  theTaskRunner.runTask(task);
}

/**
 * send a message to the multicast address for this archival unit
 * @param msg the LcapMessage to send
 * @param au the ArchivalUnit for this message
 * @throws IOException
 */
void sendMessage(V1LcapMessage msg, ArchivalUnit au) throws IOException {
  if(theRouter != null) {
    theRouter.send(msg, au);
  }
}

/**
 * send a message to the unicast address given by an identity
 * @param msg the LcapMessage to send
 * @param au the ArchivalUnit for this message
 * @param id the PeerIdentity of the identity to send to
 * @throws IOException
 */
void sendMessageTo(V1LcapMessage msg, ArchivalUnit au, PeerIdentity id)
    throws IOException {
  theRouter.sendTo(msg, au, id);
}

/**
 * send a message to the unicast address given by an identity
 * @param msg the LcapMessage to send
 * @param id the PeerIdentity of the identity to send to
 * @throws IOException
 */
public void sendMessageTo(V3LcapMessage msg, PeerIdentity id)
    throws IOException {
  theRouter.sendTo(msg, id);
}

/**
 * @return the state directory for the given V3 poll.
 */
// CR: add getStateDir() to BasePoll to avoid downcast
public File getStateDir(String pollKey) {
  if (pollKey == null) return null;
  Poll p = this.getPoll(pollKey);
  if (p != null) {
    if (p instanceof V3Voter) {
      return ((V3Voter)p).getStateDir();
    } else if (p instanceof V3Poller) {
      return ((V3Poller)p).getStateDir();
    }
  }
  return null;
}

IdentityManager getIdentityManager() {
  return theIDManager;
}

HashService getHashService() {
  return theHashService;
}

/**
 * Reload this manager's configuration.  When any key under PREFIX has
 * changed, re-reads all poll parameters and weight curves; the timeout
 * used to space poll starts is recomputed unconditionally, and the
 * new config is propagated to each installed PollFactory.
 */
public void setConfig(Configuration newConfig,
                      Configuration oldConfig,
                      Configuration.Differences changedKeys) {
  if (changedKeys.contains(PREFIX)) {
    m_recentPollExpireTime =
      newConfig.getTimeInterval(PARAM_RECENT_EXPIRATION,
                                DEFAULT_RECENT_EXPIRATION);
    synchronized (pollMapLock) {
      theRecentPolls.setInterval(m_recentPollExpireTime);
    }
    enablePollers =
      newConfig.getBoolean(PARAM_ENABLE_V3_POLLER, DEFAULT_ENABLE_V3_POLLER);
    enableVoters =
      newConfig.getBoolean(PARAM_ENABLE_V3_VOTER, DEFAULT_ENABLE_V3_VOTER);
    autoPollAuClassess = newConfig.getList(PARAM_AUTO_POLL_AUS,
                                           DEFAULT_AUTO_POLL_AUS);
    // Normalize the configured AU class names to lower case in place.
    for (ListIterator<String> iter = autoPollAuClassess.listIterator();
         iter.hasNext(); ) {
      iter.set(iter.next().toLowerCase());
    }
    deleteInvalidPollStateDirs =
      newConfig.getBoolean(PARAM_DELETE_INVALID_POLL_STATE_DIRS,
                           DEFAULT_DELETE_INVALID_POLL_STATE_DIRS);
    paramDiscardSavedPolls =
      newConfig.getBoolean(PARAM_DISCARD_SAVED_POLLS,
                           DEFAULT_DISCARD_SAVED_POLLS);
    paramToplevelPollInterval =
      newConfig.getTimeInterval(PARAM_TOPLEVEL_POLL_INTERVAL,
                                DEFAULT_TOPLEVEL_POLL_INTERVAL);
    pollStartInitialDelay =
      newConfig.getTimeInterval(PARAM_START_POLLS_INITIAL_DELAY,
                                DEFAULT_START_POLLS_INITIAL_DELAY);
    paramQueueEmptySleep =
      newConfig.getTimeInterval(PARAM_QUEUE_EMPTY_SLEEP,
                                DEFAULT_QUEUE_EMPTY_SLEEP);
    paramMaxPollersSleep =
      newConfig.getTimeInterval(PARAM_MAX_POLLERS_SLEEP,
                                DEFAULT_MAX_POLLERS_SLEEP);
    paramPollQueueMax =
      newConfig.getInt(PARAM_POLL_QUEUE_MAX, DEFAULT_POLL_QUEUE_MAX);
    paramRebuildPollQueueInterval =
      newConfig.getTimeInterval(PARAM_REBUILD_POLL_QUEUE_INTERVAL,
                                DEFAULT_REBUILD_POLL_QUEUE_INTERVAL);
    paramMinPollAttemptInterval =
      newConfig.getTimeInterval(PARAM_MIN_POLL_ATTEMPT_INTERVAL,
                                DEFAULT_MIN_POLL_ATTEMPT_INTERVAL);
    boolean oldEnable = enableV3Poller;
    enableV3Poller =
      newConfig.getBoolean(PARAM_ENABLE_V3_POLLER, DEFAULT_ENABLE_V3_POLLER);
    maxSimultaneousPollers =
      newConfig.getInt(PARAM_MAX_SIMULTANEOUS_V3_POLLERS,
                       DEFAULT_MAX_SIMULTANEOUS_V3_POLLERS);
    enablePollStarterThrottle =
      newConfig.getBoolean(PARAM_ENABLE_POLL_STARTER_THROTTLE,
                           DEFAULT_ENABLE_POLL_STARTER_THROTTLE);
    isAsynch = newConfig.getBoolean(PARAM_PSM_ASYNCH, DEFAULT_PSM_ASYNCH);
    wrongGroupRetryTime =
      newConfig.getTimeInterval(PARAM_WRONG_GROUP_RETRY_TIME,
                                DEFAULT_WRONG_GROUP_RETRY_TIME);
    paramMinPercentForRepair =
      newConfig.getPercentage(V3Voter.PARAM_MIN_PERCENT_AGREEMENT_FOR_REPAIRS,
                              V3Voter.DEFAULT_MIN_PERCENT_AGREEMENT_FOR_REPAIRS);
    paramWillingRepairerLiveness =
      newConfig.getTimeInterval(PARAM_WILLING_REPAIRER_LIVENESS,
                                DEFAULT_WILLING_REPAIRER_LIVENESS);
    paramAcceptRepairersPollPercent =
      newConfig.getPercentage(PARAM_ACCEPT_REPAIRERS_POLL_PERCENT,
                              DEFAULT_ACCEPT_REPAIRERS_POLL_PERCENT);
    paramInvitationWeightAtRisk =
      newConfig.getDouble(PARAM_INVITATION_WEIGHT_AT_RISK,
                          DEFAULT_INVITATION_WEIGHT_AT_RISK);
    paramInvitationWeightAlreadyRepairable =
      newConfig.getDouble(PARAM_INVITATION_WEIGHT_ALREADY_REPAIRABLE,
                          DEFAULT_INVITATION_WEIGHT_ALREADY_REPAIRABLE);
    List<String> noInvitationIps =
      newConfig.getList(V3Poller.PARAM_NO_INVITATION_SUBNETS, null);
    if (noInvitationIps == null || noInvitationIps.isEmpty()) {
      noInvitationSubnetFilter = null;
    } else {
      try {
        IpFilter filter = new IpFilter();
        filter.setFilters(noInvitationIps, Collections.EMPTY_LIST);
        noInvitationSubnetFilter = filter;
      } catch (IpFilter.MalformedException e) {
        // Keep the previous filter rather than installing a broken one.
        theLog.warning("Malformed noInvitationIps, not installed: " +
                       noInvitationIps, e);
      }
    }
    if (changedKeys.contains(PARAM_AT_RISK_AU_INSTANCES) &&
        theIDManager != null) {
      atRiskAuInstances =
        makeAuPeersMap(newConfig.getList(PARAM_AT_RISK_AU_INSTANCES),
                       theIDManager);
    }
    if (changedKeys.contains(PARAM_REPUTATION_TRANSFER_MAP) &&
        theIDManager != null) {
      reputationTransferMap =
        makeReputationPeerMap(newConfig.getList(PARAM_REPUTATION_TRANSFER_MAP),
                              theIDManager);
    }
    if (changedKeys.contains(PARAM_INVITATION_WEIGHT_AGE_CURVE)) {
      v3InvitationWeightAgeCurve =
        processWeightCurve("V3 invitation weight age curve",
                           newConfig,
                           PARAM_INVITATION_WEIGHT_AGE_CURVE,
                           DEFAULT_INVITATION_WEIGHT_AGE_CURVE);
    }
//     if (changedKeys.contains(PARAM_INVITATION_WEIGHT_SAFETY_CURVE)) {
//       v3InvitationWeightSafetyCurve =
//         processWeightCurve("V3 invitation weight safety curve",
//                            newConfig,
//                            PARAM_INVITATION_WEIGHT_SAFETY_CURVE,
//                            DEFAULT_INVITATION_WEIGHT_SAFETY_CURVE);
//     }
    if (changedKeys.contains(PARAM_POLL_INTERVAL_AGREEMENT_CURVE)) {
      pollIntervalAgreementCurve =
        processWeightCurve("V3 poll interval agreement curve",
                           newConfig,
                           PARAM_POLL_INTERVAL_AGREEMENT_CURVE,
                           DEFAULT_POLL_INTERVAL_AGREEMENT_CURVE);
    }
    if (changedKeys.contains(PARAM_POLL_INTERVAL_AT_RISK_PEERS_CURVE)) {
      pollIntervalAtRiskPeersCurve =
        processWeightCurve("V3 poll interval at risk peers curve",
                           newConfig,
                           PARAM_POLL_INTERVAL_AT_RISK_PEERS_CURVE,
                           DEFAULT_POLL_INTERVAL_AT_RISK_PEERS_CURVE);
    }
    if (changedKeys.contains(PARAM_POLL_INTERVAL_AGREEMENT_LAST_RESULT)) {
      List<String> lst =
        newConfig.getList(PARAM_POLL_INTERVAL_AGREEMENT_LAST_RESULT,
                          DEFAULT_POLL_INTERVAL_AGREEMENT_LAST_RESULT);
      Set res = new HashSet();
      for (String str : lst) {
        res.add(Integer.valueOf(str));
      }
      pollIntervalAgreementLastResult = res;
    }
    if (changedKeys.contains(PARAM_POLL_WEIGHT_AT_RISK_PEERS_CURVE)) {
      pollWeightAtRiskPeersCurve =
        processWeightCurve("V3 poll weight at risk peers curve",
                           newConfig,
                           PARAM_POLL_WEIGHT_AT_RISK_PEERS_CURVE,
                           DEFAULT_POLL_WEIGHT_AT_RISK_PEERS_CURVE);
    }
    if (changedKeys.contains(PARAM_ACCEPT_PROBABILITY_SAFETY_CURVE)) {
      v3AcceptProbabilitySafetyCurve =
        processWeightCurve("V3 accept probability safety curve",
                           newConfig,
                           PARAM_ACCEPT_PROBABILITY_SAFETY_CURVE,
                           DEFAULT_ACCEPT_PROBABILITY_SAFETY_CURVE);
    }
    if (changedKeys.contains(PARAM_NOMINATION_WEIGHT_AGE_CURVE)) {
      v3NominationWeightAgeCurve =
        processWeightCurve("V3 nomination weight age curve",
                           newConfig,
                           PARAM_NOMINATION_WEIGHT_AGE_CURVE,
                           DEFAULT_NOMINATION_WEIGHT_AGE_CURVE);
    }
    if (changedKeys.contains(PARAM_NO_AU_RESET_INTERVAL_CURVE)) {
      v3NoAuResetIntervalCurve =
        processWeightCurve("V3 no-AU reset interval curve",
                           newConfig,
                           PARAM_NO_AU_RESET_INTERVAL_CURVE,
                           DEFAULT_NO_AU_RESET_INTERVAL_CURVE);
    }
    if (changedKeys.contains(PARAM_VOTE_RETRY_INTERVAL_DURATION_CURVE)) {
      v3VoteRetryIntervalDurationCurve =
        processWeightCurve("V3 vote message retry interval age curve",
                           newConfig,
                           PARAM_VOTE_RETRY_INTERVAL_DURATION_CURVE,
                           DEFAULT_VOTE_RETRY_INTERVAL_DURATION_CURVE);
    }
    needRebuildPollQueue();
  }
  // Recomputed on every config change, not just under PREFIX: the delay
  // between poll starts depends on comm and state-machine timeouts.
  long scommTimeout =
    newConfig.getTimeInterval(BlockingStreamComm.PARAM_CONNECT_TIMEOUT,
                              BlockingStreamComm.DEFAULT_CONNECT_TIMEOUT);
  long psmRunnerTimeout =
    newConfig.getTimeInterval(PsmManager.PARAM_RUNNER_IDLE_TIME,
                              PsmManager.DEFAULT_RUNNER_IDLE_TIME);
  long addedTimeout =
    newConfig.getTimeInterval(PARAM_ADDED_POLL_DELAY,
                              DEFAULT_ADDED_POLL_DELAY);
  interPollStartDelay = (Math.max(scommTimeout, psmRunnerTimeout)
                         + addedTimeout);
  // Propagate the new config to every installed poll factory.
  for (int i = 0; i < pf.length; i++) {
    if (pf[i] != null) {
      pf[i].setConfig(newConfig, oldConfig, changedKeys);
    }
  }
}

/** Build reputation map backwards (toPid -> fromPid) because it's
 * accessed to determine whether a repair should be served (as opposed to
 * when reputation is established), so we need to look up toPid to see if
 * another peer's reputation should be extended to it.
 This implies that
 * only one peer's reputation may be extended to any other peer */
Peer2PeerMap makeReputationPeerMap(Collection<String> peerPairs,
                                   IdentityManager idMgr) {
  Peer2PeerMap res = new Peer2PeerMap();
  for (String onePair : peerPairs) {
    // Each entry is "fromPid,toPid"; the map is keyed by toPid (see above).
    List<String> lst = StringUtil.breakAt(onePair, ',', -1, true, true);
    if (lst.size() == 2) {
      try {
        PeerIdentity pid1 = idMgr.stringToPeerIdentity(lst.get(0));
        PeerIdentity pid2 = idMgr.stringToPeerIdentity(lst.get(1));
        res.put(pid2, pid1);
        if (theLog.isDebug2()) {
          theLog.debug2("Extend reputation from " + pid1 + " to " + pid2);
        }
      } catch (IdentityManager.MalformedIdentityKeyException e) {
        theLog.warning("Bad peer id in peer2peer map", e);
      }
    } else {
      theLog.warning("Malformed reputation mapping: " + onePair);
    }
  }
  return res;
}

// Parses the configured at-risk AU list.  Each entry is
// "auid,peer1,peer2,..."; builds a map from AU id to the set of peers at
// risk for it and logs a histogram of set sizes.
AuPeersMap makeAuPeersMap(Collection<String> auPeersList,
                          IdentityManager idMgr) {
  AuPeersMap res = new AuPeersMap();
  Map<Integer,MutableInt> hist = new TreeMap<Integer,MutableInt>();
  for (String oneAu : auPeersList) {
    List<String> lst = StringUtil.breakAt(oneAu, ',', -1, true, true);
    if (lst.size() >= 2) {
      String auid = null;
      Set peers = new HashSet();
      for (String s : lst) {
        if (auid == null) {
          // First element of the entry is the AU id.
          auid = s;
        } else {
          try {
            PeerIdentity pid = idMgr.stringToPeerIdentity(s);
            peers.add(pid);
          } catch (IdentityManager.MalformedIdentityKeyException e) {
            theLog.warning("Bad peer on at risk list for " + auid, e);
          }
        }
      }
      res.put(auid, peers);
      // Tally the peer-set size for the histogram logged below.
      int size = peers.size();
      MutableInt n = hist.get(size);
      if (n == null) {
        n = new MutableInt();
        hist.put(size, n);
      }
      n.add(1);
    }
  }
  StringBuilder sb = new StringBuilder();
  sb.append("AU peers hist:\nAUs at risk on\n\tPeers");
  for (Map.Entry<Integer,MutableInt> ent : hist.entrySet()) {
    sb.append("\n");
    sb.append(ent.getKey());
    sb.append("\t");
    sb.append(ent.getValue());
  }
  theLog.debug(sb.toString());
  return res;
}

/** Return the peer whose reputation is extended to pid, or null. */
public PeerIdentity getReputationTransferredFrom(PeerIdentity pid) {
  if (reputationTransferMap != null) {
    return reputationTransferMap.get(pid);
  }
  return null;
}

/** Return the set of peers for which this AU is at risk, or null. */
public Set<PeerIdentity> getPeersWithAuAtRisk(ArchivalUnit au) {
  if (atRiskAuInstances == null) {
    return null;
  }
  return atRiskAuInstances.get(au.getAuId());
}

/**
 * Parse a weight-curve expression from the config.
 * @param name human-readable curve name, used only in log messages
 * @param config the Configuration to read from
 * @param param the config parameter holding the curve expression
 * @param dfault the default expression
 * @return the parsed curve, or null if the expression is empty or malformed
 */
CompoundLinearSlope processWeightCurve(String name,
                                       Configuration config,
                                       String param,
                                       String dfault) {
  String probCurve = config.get(param, dfault);
  if (StringUtil.isNullString(probCurve)) {
    return null;
  } else {
    try {
      CompoundLinearSlope curve = new CompoundLinearSlope(probCurve);
      theLog.info("Installed " + name + ": " + curve);
      return curve;
    } catch (Exception e) {
      theLog.warning("Malformed " + name + ": " + probCurve, e);
      return null;
    }
  }
}

// Simple accessors for configured poll parameters and curves.

public boolean isAsynch() {
  return isAsynch;
}

public long getWrongGroupRetryTime() {
  return wrongGroupRetryTime;
}

public IpFilter getNoInvitationSubnetFilter() {
  return noInvitationSubnetFilter;
}

public CompoundLinearSlope getInvitationWeightAgeCurve() {
  return v3InvitationWeightAgeCurve;
}

//   public CompoundLinearSlope getInvitationWeightSafetyCurve() {
//     return v3InvitationWeightSafetyCurve;
//   }

public CompoundLinearSlope getAcceptProbabilitySafetyCurve() {
  return v3AcceptProbabilitySafetyCurve;
}

public CompoundLinearSlope getNominationWeightAgeCurve() {
  return v3NominationWeightAgeCurve;
}

public double getInvitationWeightAtRisk() {
  return paramInvitationWeightAtRisk;
}

public double getInvitationWeightAlreadyRepairable() {
  return paramInvitationWeightAlreadyRepairable;
}

public CompoundLinearSlope getPollIntervalAgreementCurve() {
  return pollIntervalAgreementCurve;
}

public Set getPollIntervalAgreementLastResult() {
  return pollIntervalAgreementLastResult;
}

public CompoundLinearSlope getVoteRetryIntervalDurationCurve() {
  return v3VoteRetryIntervalDurationCurve;
}

public long getWillingRepairerLiveness() {
  return paramWillingRepairerLiveness;
}

public double getAcceptRepairersPollPercent() {
  return paramAcceptRepairersPollPercent;
}

public double getMinPercentForRepair() {
  return paramMinPercentForRepair;
}

/** True iff the peer's address matches the configured no-invitation filter. */
public boolean isNoInvitationSubnet(PeerIdentity pid) {
  IpFilter filter = getNoInvitationSubnetFilter();
  return filter != null && pid.getPeerAddress().isAllowed(filter);
}

// Ensure only a single instance of a noAuSet exists for each AU, so can
// synchronize on them and use in multiple threads.
Map<ArchivalUnit,DatedPeerIdSet> noAuPeerSets =
  new ReferenceMap(ReferenceMap.HARD, ReferenceMap.WEAK);

/** Return the noAuSet for the AU.  If an instance of the noAuSet for
 * this AU already exists in memory it will be returned.  The caller must
 * synchronize on that object before operating on it */
public DatedPeerIdSet getNoAuPeerSet(ArchivalUnit au) {
  synchronized (noAuPeerSets) {
    DatedPeerIdSet noAuSet = noAuPeerSets.get(au);
    if (noAuSet == null) {
      HistoryRepository historyRepo = getDaemon().getHistoryRepository(au);
      noAuSet = historyRepo.getNoAuPeerSet();
      noAuPeerSets.put(au, noAuSet);
    }
    return noAuSet;
  }
}

/** Clear the noAuSet if it's older than the interval specified as a
 * function of the AU's age by v3NoAuResetIntervalCurve */
public void ageNoAuSet(ArchivalUnit au, DatedPeerIdSet noAuSet) {
  try {
    if (noAuSet.isEmpty()) {
      return;
    }
    long lastTimestamp = noAuSet.getDate();
    if (lastTimestamp < 0) {
      return;
    }
    AuState state = AuUtil.getAuState(au);
    long auAge = TimeBase.msSince(state.getAuCreationTime());
    long threshold = (long)Math.round(v3NoAuResetIntervalCurve.getY(auAge));
    if (TimeBase.msSince(lastTimestamp) >= threshold) {
      noAuSet.clear();
      noAuSet.store(false);
    }
  } catch (IOException e) {
    // impossible with loaded PersistentPeerIdSet
    theLog.warning("Impossible error in loaded PersistentPeerIdSet", e);
  }
}

public PollFactory getPollFactory(PollSpec spec) {
  return getPollFactory(spec.getProtocolVersion());
}

public PollFactory getPollFactory(LcapMessage msg) {
  return getPollFactory(msg.getProtocolVersion());
}

public PollFactory getPollFactory(int version) {
  try {
    return pf[version];
  } catch (ArrayIndexOutOfBoundsException e) {
    theLog.error("Unknown poll version: " + version, e);
    return null;
  }
}

/**
 * Load and start V3
polls that are found in a serialized state * on the disk. If the poll has expired, or if the state has been * corrupted, delete the poll directory. */ private void preloadStoredPolls() { this.serializedPollers = new HashMap(); this.serializedVoters = new HashMap(); File stateDir = PollUtil.ensurePollStateRoot(); File[] dirs = stateDir.listFiles(); if (dirs == null || dirs.length == 0) { theLog.debug2("No saved polls found."); return; } for (int ix = 0; ix < dirs.length; ix++) { boolean restored = false; // 1. See if there's a serialized poller. if (enablePollers) { File poller = new File(dirs[ix], V3PollerSerializer.POLLER_STATE_BEAN); if (poller.exists()) { if (paramDiscardSavedPolls) { theLog.debug("Discarding poll in directory " + dirs[ix]); FileUtil.delTree(dirs[ix]); continue; } // Add this poll dir to the serialized polls map. try { V3PollerSerializer pollSerializer = new V3PollerSerializer(getDaemon(), dirs[ix]); PollerStateBean psb = pollSerializer.loadPollerState(); // Check to see if this poll has expired. boolean expired = psb.getPollDeadline() <= TimeBase.nowMs(); if (expired) { theLog.debug("Discarding expires poll in directory " + dirs[ix]); FileUtil.delTree(dirs[ix]); continue; } theLog.debug2("Found saved poll for AU " + psb.getAuId() + " in directory " + dirs[ix]); // CR: Should never be more than one saved poll per AU. Don't // need Set, and error if find more than one Set pollsForAu = null; if ((pollsForAu = (Set)serializedPollers.get(psb.getAuId())) == null) { pollsForAu = new HashSet(); serializedPollers.put(psb.getAuId(), pollsForAu); } pollsForAu.add(dirs[ix]); restored = true; } catch (PollSerializerException e) { theLog.error("Exception while trying to restore poller from " + "directory: " + dirs[ix] + ". Cleaning up dir.", e); FileUtil.delTree(dirs[ix]); continue; } } else { theLog.debug("No serialized poller found in dir " + dirs[ix]); } } // 2. See if there's a serialized voter. 
if (enableVoters) { File voter = new File(dirs[ix], V3VoterSerializer.VOTER_USER_DATA_FILE); if (voter.exists()) { if (paramDiscardSavedPolls) { theLog.debug("Discarding vote in directory " + dirs[ix]); FileUtil.delTree(dirs[ix]); continue; } theLog.info("Found serialized voter in file: " + voter); try { V3VoterSerializer voterSerializer = new V3VoterSerializer(getDaemon(), dirs[ix]); VoterUserData vd = voterSerializer.loadVoterUserData(); // Check to see if this poll has expired. boolean expired = vd.getDeadline() <= TimeBase.nowMs(); if (expired) { theLog.debug("Discarding expired vote in directory " + dirs[ix]); FileUtil.delTree(dirs[ix]); continue; } theLog.debug2("Found saved poll for AU " + vd.getAuId() + " in directory " + dirs[ix]); Set pollsForAu = null; if ((pollsForAu = (Set)serializedVoters.get(vd.getAuId())) == null) { pollsForAu = new HashSet(); serializedVoters.put(vd.getAuId(), pollsForAu); } pollsForAu.add(dirs[ix]); restored = true; } catch (PollSerializerException e) { theLog.error("Exception while trying to restore voter from " + "directory: " + dirs[ix] + ". Cleaning up dir.", e); FileUtil.delTree(dirs[ix]); continue; } } else { theLog.debug("No serialized voter found in dir " + dirs[ix]); } } // If neither a voter nor a poller was found, this dir can be // cleaned up, unless KEEP_INVALID_POLLSTATE_DIRS is true. if (!restored) { if (deleteInvalidPollStateDirs) { theLog.debug("Deleting invalid poll state directory " + dirs[ix]); FileUtil.delTree(dirs[ix]); } else { theLog.debug("Not deleting invalid poll state directory " + dirs[ix] + " due to config."); } } } } public void restoreAuPolls(ArchivalUnit au) { // Shouldn't happen. if (serializedPollers == null) { throw new NullPointerException("Null serialized poll map."); } if (serializedVoters == null) { throw new NullPointerException("Null serialized voter map."); } // Restore any pollers for this AU. // CR: Don't need loop here, s.b. 
max 1 poller per AU Set pollDirs = (Set)serializedPollers.get(au.getAuId()); if (pollDirs != null) { Iterator pollDirIter = pollDirs.iterator(); while (pollDirIter.hasNext()) { File dir = (File)pollDirIter.next(); try { V3Poller p = new V3Poller(getDaemon(), dir); addPoll(p); p.startPoll(); } catch (PollSerializerException e) { theLog.error("Unable to restore poller from dir: " + dir, e); } } serializedPollers.remove(au.getAuId()); } // Restore any voters for this AU. Set voterDirs = (Set)serializedVoters.get(au.getAuId()); if (voterDirs != null) { Iterator voterDirIter = voterDirs.iterator(); while (voterDirIter.hasNext()) { File dir = (File)voterDirIter.next(); try { V3Voter v = new V3Voter(getDaemon(), dir); addPoll(v); v.startPoll(); } catch (PollSerializerException e) { theLog.error("Unable to restore poller from dir: " + dir, e); } } serializedVoters.remove(au.getAuId()); } } public PollRunner getPollRunner() { return theTaskRunner; } //--------------- PollerStatus Accessors ----------------------------- public Collection getV1Polls() { Collection polls = new ArrayList(); synchronized (pollMapLock) { for (PollManagerEntry pme : thePolls.values()) { if (pme.getType() == Poll.V1_CONTENT_POLL || pme.getType() == Poll.V1_NAME_POLL || pme.getType() == Poll.V1_VERIFY_POLL) { polls.add(pme); } } for (Iterator it = theRecentPolls.values().iterator(); it.hasNext(); ) { PollManagerEntry pme = (PollManagerEntry)it.next(); if (pme.getType() == Poll.V1_CONTENT_POLL || pme.getType() == Poll.V1_NAME_POLL || pme.getType() == Poll.V1_VERIFY_POLL) { polls.add(pme); } } } return polls; } private Collection getActiveV3Pollers() { Collection polls = new ArrayList(); synchronized (pollMapLock) { for (PollManagerEntry pme : thePolls.values()) { if (pme.isV3Poll() && pme.getPoll() instanceof V3Poller) { polls.add(pme.getPoll()); } } } return polls; } private Collection getRecentV3Pollers() { Collection polls = new ArrayList(); synchronized (pollMapLock) { for (Iterator it = 
theRecentPolls.values().iterator(); it.hasNext(); ) { PollManagerEntry pme = (PollManagerEntry)it.next(); if (pme.isV3Poll() && pme.getPoll() instanceof V3Poller) { polls.add(pme.getPoll()); } } } return polls; } private Collection getActiveV3Voters() { Collection polls = new ArrayList(); synchronized (pollMapLock) { for (PollManagerEntry pme : thePolls.values()) { if (pme.isV3Poll() && pme.getPoll() instanceof V3Voter) { polls.add(pme.getPoll()); } } } return polls; } /** * Check the current policy to see if a request for a new V3Voter * should be rejected due to too many V3Voters already present. * @return true iff the number of active V3Voters is already at or * above the limit. */ public boolean tooManyV3Voters() { int maxVoters = CurrentConfig.getIntParam(V3Voter.PARAM_MAX_SIMULTANEOUS_V3_VOTERS, V3Voter.DEFAULT_MAX_SIMULTANEOUS_V3_VOTERS); int activeVoters = getActiveV3Voters().size(); if (activeVoters >= maxVoters) { theLog.info("Maximum number of active voters is " + maxVoters + "; " + activeVoters + " are already running."); return true; } return false; } private Collection getRecentV3Voters() { Collection polls = new ArrayList(); synchronized (pollMapLock) { for (Iterator it = theRecentPolls.values().iterator(); it.hasNext(); ) { PollManagerEntry pme = (PollManagerEntry)it.next(); if (pme.isV3Poll() && pme.getPoll() instanceof V3Voter) { polls.add(pme.getPoll()); } } } return polls; } // Used by V3PollStatus. 
public synchronized Collection getV3Pollers() { Collection polls = new ArrayList(); polls.addAll(getActiveV3Pollers()); polls.addAll(getRecentV3Pollers()); return polls; } // Used by V3PollStatus public synchronized Collection getV3Voters() { Collection polls = new ArrayList(); polls.addAll(getActiveV3Voters()); polls.addAll(getRecentV3Voters()); return polls; } // Used by V3PollStatus public BasePoll getPoll(String key) { PollManagerEntry pme = getCurrentOrRecentV3PollEntry(key); if(pme != null) { return pme.getPoll(); } return null; } //-------------- TestPollManager Accessors ---------------------------- /** * remove the poll represented by the given key from the poll table and * return it. * @param key the String representation of the polls key * @return Poll the poll if found or null */ BasePoll removePoll(String key) { synchronized (pollMapLock) { PollManagerEntry pme = thePolls.remove(key); return (pme != null) ? pme.poll : null; } } void addPoll(BasePoll p) { synchronized (pollMapLock) { thePolls.put(p.getKey(), new PollManagerEntry(p)); } } boolean isPollActive(String key) { PollManagerEntry pme = getPollManagerEntry(key); return (pme != null) ? pme.isPollActive() : false; } boolean isPollClosed(String key) { PollManagerEntry pme; synchronized (pollMapLock) { pme = (PollManagerEntry)theRecentPolls.get(key); } return (pme != null) ? pme.isPollCompleted() : false; } boolean isPollSuspended(String key) { PollManagerEntry pme = getPollManagerEntry(key); return (pme != null) ? 
pme.isPollSuspended() : false; } static BasePoll makeTestPoll(LcapMessage msg) throws ProtocolException { if(theManager == null) { theManager = new PollManager(); } return theManager.makePoll(msg); } long getSlowestHashSpeed() { return theSystemMetrics.getSlowestHashSpeed(); } long getBytesPerMsHashEstimate() throws SystemMetrics.NoHashEstimateAvailableException { return theSystemMetrics.getBytesPerMsHashEstimate(); } public boolean hasPoll(String key) { synchronized (pollMapLock) { return thePolls.containsKey(key); } } public V3PollStatusAccessor getV3Status() { return v3Status; } // ---------------- Callbacks ----------------------------------- class RouterMessageHandler implements LcapRouter.MessageHandler { public void handleMessage(LcapMessage msg) { theLog.debug3("received from router message:" + msg.toString()); try { handleIncomingMessage(msg); } catch (IOException ex) { theLog.error("handleIncomingMessage() threw", ex); } } } /** * <p>PollManagerEntry: </p> * <p>Description: Class to represent the data store in the polls table. * @version 1.0 */ public static class PollManagerEntry { private BasePoll poll; private PollSpec spec; private int type; private Deadline pollDeadline; private Deadline deadline; private String key; PollManagerEntry(BasePoll p) { poll = p; spec = p.getPollSpec(); type = p.getPollSpec().getPollType(); key = p.getKey(); pollDeadline = p.getDeadline(); deadline = null; } boolean isPollActive() { return poll.isPollActive(); } boolean isPollCompleted() { return poll.isPollCompleted(); } boolean isPollSuspended() { if (isV3Poll()) return false; return poll.getVoteTally().stateIsSuspended(); } synchronized void setPollCompleted() { if (!isV3Poll()) { PollTally tally = poll.getVoteTally(); tally.tallyVotes(); } } synchronized void setPollSuspended() { poll.getVoteTally().setStateSuspended(); if(deadline != null) { deadline.expire(); deadline = null; } } public String getStatusString() { // Hack for V3. 
if (isV3Poll()) { return poll.getStatusString(); } else { if (isPollCompleted()) { return poll.getVoteTally().getStatusString(); } else if(isPollActive()) { return "Active"; } else if(isPollSuspended()) { return "Repairing"; } return "Unknown"; } } public String getTypeString() { return Poll.POLL_NAME[type]; } public String getShortKey() { return(key.substring(0,10)); } public String getKey() { return key; } public BasePoll getPoll() { return poll; } public int getType() { return type; } public PollSpec getPollSpec() { return spec; } public Deadline getPollDeadline() { return pollDeadline; } public Deadline getDeadline() { return deadline; } public boolean isSamePoll(PollSpec otherSpec) { if(this.type == otherSpec.getPollType()) { return this.spec.getCachedUrlSet().equals(otherSpec.getCachedUrlSet()); } return false; } /** * Convenience method * @return True iff this is a V3 poll. */ // XXX: V3 -- Remove when V1 polling is no longer supported. public boolean isV3Poll() { return (this.type == Poll.V3_POLL); } } /* * XXX: This is a temporary class to hold AU-specific status for * V3 polls. Eventually, the goal is to replace the current * node and poll history with a centralized V3-centric poll * history mechanism. Until then, this in-memory structure will * hold poll history for V3 AUs between reboots. */ public class V3PollStatusAccessor { HashMap map; long nextPollStartTime = -1; public V3PollStatusAccessor() { map = new HashMap(); } private V3PollStatusAccessorEntry getEntry(String auId) { V3PollStatusAccessorEntry e = (V3PollStatusAccessorEntry)map.get(auId); if (e == null) { e = new V3PollStatusAccessorEntry(); map.put(auId, e); } return e; } /** * Set the last completed V3 poll time for an AU. * * @param auId The ID of the Archival Unit. * @param lastPollTime The timestamp of the last completed V3 poll. 
*/ public void setLastPollTime(String auId, long lastPollTime) { getEntry(auId).lastPollTime = lastPollTime; } /** * Get the last completed V3 poll time for an AU. * * @param auId The ID of the Archival Unit. * @return The timestamp of the last completed V3 poll. */ public long getLastPollTime(String auId) { return getEntry(auId).lastPollTime; } /** * Increment the number of completed V3 polls for an AU. * * @param auId The ID of the Archival Unit. */ public void incrementNumPolls(String auId) { getEntry(auId).numPolls++; } /** * Return the number of polls (since the last restart) for * an Archival Unit. * * @param auId The ID of the Archival Unit. * @return The number of completed V3 polls since the last * daemon restart. */ public int getNumPolls(String auId) { return getEntry(auId).numPolls; } /** * Set the percent agreement for an archival unit as of the * last completed V3 poll. * * @param auId The ID of the Archival Unit. * @param agreement The percent agreement as of the last completed V3 poll. */ public void setAgreement(String auId, float agreement) { getEntry(auId).agreement = agreement; } /** * Return the percent agreement for an archival unit as of the last * completed V3 poll. * * @param auId The ID of the Archival Unit. * @return The percent agreement as of the last completed V3 poll. */ public float getAgreement(String auId) { return getEntry(auId).agreement; } public void setNextPollStartTime(Deadline when) { if (when == null) { nextPollStartTime = -1; } else { nextPollStartTime = when.getExpirationTime(); } } public Deadline getNextPollStartTime() { if (nextPollStartTime > -1) { return Deadline.restoreDeadlineAt(nextPollStartTime); } else { return null; } } /** * Clear the poll history map. 
*/ public void clear() { map.clear(); } } /* * Just a struct to hold status information per-au */ // CR: Seth thinks this is redundant private static class V3PollStatusAccessorEntry { public long lastPollTime = -1; public int numPolls = 0; public float agreement = 0.0f; } /** LOCKSS Runnable responsible for occasionally scanning for AUs that * need polls. */ private class PollStarter extends LockssRunnable { static final String PRIORITY_PARAM_POLLER = "Poller"; static final int PRIORITY_DEFAULT_POLLER = Thread.NORM_PRIORITY - 1; private LockssDaemon lockssDaemon; private PollManager pollManager; private volatile boolean goOn = true; public PollStarter(LockssDaemon lockssDaemon, PollManager pollManager) { super("PollStarter"); this.lockssDaemon = lockssDaemon; this.pollManager = pollManager; } public void lockssRun() { // Triggur the LockssRun thread watchdog on exit. triggerWDogOnExit(true); setPriority(PRIORITY_PARAM_POLLER, PRIORITY_DEFAULT_POLLER); if (goOn) { try { theLog.debug("Waiting until AUs started"); lockssDaemon.waitUntilAusStarted(); Deadline initial = Deadline.in(pollStartInitialDelay); pollManager.getV3Status().setNextPollStartTime(initial); initial.sleep(); } catch (InterruptedException e) { // just wakeup and check for exit } } while (goOn) { pollManager.getV3Status().setNextPollStartTime(null); try { startOnePoll(); } catch (RuntimeException e) { // Can happen if AU deactivated recently theLog.debug2("Error starting poll", e); // Avoid tight loop if startOnePoll() throws. Just being extra // cautious in case another bug similar to Roundup 4091 arises. 
try { Deadline errorWait = Deadline.in(Constants.MINUTE); errorWait.sleep(); } catch (InterruptedException ign) { // ignore } } catch (InterruptedException e) { // check goOn } } } public void stopPollStarter() { goOn = false; interruptThread(); } } boolean startOnePoll() throws InterruptedException { if (!enableV3Poller) { startOneWait.expireIn(paramMaxPollersSleep); } else { int activePollers = getActiveV3Pollers().size(); if (activePollers >= maxSimultaneousPollers) { startOneWait.expireIn(paramMaxPollersSleep); } else { PollReq req = pollQueue.nextReq(); if (req != null) { startPoll(req); // todo(bhayes): This seems odd; I would expect that this // would be a post-condition of nextReq(). pollQueue.cancelAuPolls(req.au); return true; } else { startOneWait.expireIn(paramQueueEmptySleep); } } } v3Status.setNextPollStartTime(startOneWait); while (!startOneWait.expired()) { try { startOneWait.sleep(); } catch (InterruptedException e) { // just wakeup and check } } return false; } public List<String> getAutoPollAuClasses() { return autoPollAuClassess; } public List<ArchivalUnit> getPendingQueueAus() { return pollQueue.getPendingQueueAus(); } public void enqueueHighPriorityPoll(ArchivalUnit au, PollSpec spec) throws NotEligibleException { if (au.getAuId() != spec.getAuId()) { throw new IllegalArgumentException("auId in au \""+au.getAuId() +"\" does not match auId in spec \"" +spec.getAuId()+"\""); } PollReq req = new PollManager.PollReq(au) .setPollSpec(spec) .setPriority(2); enqueueHighPriorityPoll(req); } private void enqueueHighPriorityPoll(PollReq req) throws NotEligibleException { theLog.debug2("enqueueHighPriorityPoll(" + req + ")"); if (!req.isHighPriority()) { throw new IllegalArgumentException( "High priority polls must have a positive priority: "+req); } // the check will throw NotEligibleException with an appropriate message. 
checkEligibleForPoll(req); pollQueue.enqueueHighPriorityPoll(req); } void needRebuildPollQueue() { // Expiration of these timers causes nextReq() to rebuild the poll // queue the next time it's called. As it doesn't trigger an immediate // event, there's no need for a short delay. pollQueue.needRebuildPollQueue(); startOneWait.expire(); } // testing will override. protected List<ArchivalUnit> weightedRandomSelection(Map<ArchivalUnit, Double> weightMap, int n) { return (List<ArchivalUnit>)CollectionUtil.weightedRandomSelection(weightMap, n); } /** Used to convey reason an AU is ineligible to be polled to clients for * logging/display */ public class NotEligibleException extends Exception { public NotEligibleException(String msg) { super(msg); } } private boolean isEligibleForPoll(PollReq req) { try { checkEligibleForPoll(req); return true; } catch (NotEligibleException e) { return false; } } // todo(bhayes): DebugPanel calls this, assuming the default is a V3 // Content Poll. On the other hand, it also creates an explicit // PollSpec for enqueueHighPriorityPoll. It feels as if in this API // the two calls should be parallel. Also possible would be for // checkEligibleForPoll to let the caller know if a high priority // would be eligible when a default priority would not. /** * @return true iff the ArchivalUnit would be eligible for a * "default" V3 Content Poll. */ public void checkEligibleForPoll(ArchivalUnit au) throws NotEligibleException { // todo(bhayes): This is creating a PollReq with no PollSpec, // purely for the prupose of checking eligibility, which happens // to not use the spec. checkEligibleForPoll(new PollReq(au)); } private void checkEligibleForPoll(PollReq req) throws NotEligibleException { ArchivalUnit au = req.au; // If a poll is already running, don't start another one. 
if (isPollRunning(au)) { throw new NotEligibleException("AU is already running a poll."); } checkAuClassAllowed(req); if (req.isHighPriority()) { return; } // Following tests suppressed for high priority (manually enqueued) // polls AuState auState = AuUtil.getAuState(req.au); // Does AU want to be polled? if (!au.shouldCallTopLevelPoll(auState)) { throw new NotEligibleException("AU does not want to be polled."); } // Do not call polls on AUs that have not crawled, UNLESS that AU // is marked pubdown. if (!auState.hasCrawled() && !AuUtil.isPubDown(au)) { theLog.debug3("Not crawled or down, not calling a poll on " + au); throw new NotEligibleException("AU has not crawled and is not marked down."); } long sinceLast = TimeBase.msSince(auState.getLastPollAttempt()); if (sinceLast < paramMinPollAttemptInterval) { String msg = "Poll attempted too recently (" + StringUtil.timeIntervalToString(sinceLast) + " < " + StringUtil.timeIntervalToString(paramMinPollAttemptInterval) + ")."; theLog.debug3(msg + " " + au); throw new NotEligibleException(msg); } } void checkAuClassAllowed(PollReq req) throws NotEligibleException { if (autoPollAuClassess.contains("all")) { return; } ArchivalUnit au = req.au; if (pluginMgr.isInternalAu(au) && autoPollAuClassess.contains("internal")) { return; } if (req.isHighPriority() && autoPollAuClassess.contains("priority")) { return; } throw new NotEligibleException("Only AU classes {" + StringUtil.separatedString(autoPollAuClassess, ", ") + "} are allowed to poll."); } /** Return a number proportional to the desirability of calling a poll on * the AU. 
*/ double pollWeight(ArchivalUnit au) throws NotEligibleException { checkEligibleForPoll(au); AuState auState = AuUtil.getAuState(au); long lastEnd = auState.getLastTopLevelPollTime(); long pollInterval; if (pollIntervalAgreementCurve != null && pollIntervalAgreementLastResult.contains(auState.getLastPollResult())) { int agreePercent = (int)Math.round(auState.getV3Agreement() * 100.0); pollInterval = (int)pollIntervalAgreementCurve.getY(agreePercent); } else { pollInterval = paramToplevelPollInterval; } int numrisk = numPeersWithAuAtRisk(au); if (pollIntervalAtRiskPeersCurve != null) { int atRiskInterval = (int)pollIntervalAtRiskPeersCurve.getY(numrisk); if (atRiskInterval >= 0) { pollInterval = Math.min(pollInterval, atRiskInterval); } } if (lastEnd + pollInterval > TimeBase.nowMs()) { return 0.0; } long num = TimeBase.msSince(lastEnd); long denom = pollInterval + auState.getPollDuration(); double weight = (double)num / (double)denom; if (pollWeightAtRiskPeersCurve != null) { weight *= pollWeightAtRiskPeersCurve.getY(numrisk); } return weight; } int numPeersWithAuAtRisk(ArchivalUnit au) { Set peers = getPeersWithAuAtRisk(au); if (peers == null) { return 0; } return peers.size(); } boolean startPoll(PollReq req) { ArchivalUnit au = req.getAu(); if (isPollRunning(au)) { theLog.debug("Attempted to start poll when one is already running: " + au.getName()); return false; } // todo(bhayes): Should this be using the spec from the request? PollSpec spec = new PollSpec(au.getAuCachedUrlSet(), Poll.V3_POLL); theLog.debug("Calling a V3 poll on AU " + au); if (callPoll(au, spec) == null) { theLog.debug("pollManager.callPoll returned null. Failed to call " + "a V3 poll on " + au); return false; } // Add a delay to throttle poll starting. The delay is the sum of // the scomm timeout and an additional number of milliseconds. 
if (enablePollStarterThrottle) { try { Deadline dontStartPollBefore = Deadline.in(interPollStartDelay); v3Status.setNextPollStartTime(dontStartPollBefore); dontStartPollBefore.sleep(); } catch (InterruptedException ex) { // Just proceed to the next poll. } v3Status.setNextPollStartTime(null); } return true; } private Set recalcingAus = Collections.synchronizedSet(new HashSet()); /** Remember that we have scheduled a hash to recalculate the hash time * for this AU */ public void addRecalcAu(ArchivalUnit au) { recalcingAus.add(au); } /** Done with hash to recalculate the hash time for this AU */ public void removeRecalcAu(ArchivalUnit au) { recalcingAus.remove(au); } /** Return true if already have scheduled a hash to recalculate the hash * time for this AU */ public boolean isRecalcAu(ArchivalUnit au) { return recalcingAus.contains(au); } public enum EventCtr {Polls, Invitations, Accepted, Declined, Voted, ReceivedVoteReceipt, }; Map<EventCtr,MutableInt> eventCounters = new EnumMap<EventCtr,MutableInt>(EventCtr.class); Map<PollNak,MutableInt> voterNakEventCounters = new EnumMap<PollNak,MutableInt>(PollNak.class); int[] pollEndEventCounters = new int[POLLER_STATUS_STRINGS.length]; public void countEvent(EventCtr c) { synchronized (eventCounters) { MutableInt n = eventCounters.get(c); if (n == null) { n = new MutableInt(); eventCounters.put(c, n); } n.add(1); } } public void countVoterNakEvent(PollNak nak) { synchronized (voterNakEventCounters) { MutableInt n = voterNakEventCounters.get(nak); if (n == null) { n = new MutableInt(); voterNakEventCounters.put(nak, n); } n.add(1); } countEvent(EventCtr.Declined); } public void countPollEndEvent(int status) { synchronized (pollEndEventCounters) { pollEndEventCounters[status]++; } } public int getEventCount(EventCtr c) { synchronized (eventCounters) { MutableInt n = eventCounters.get(c); return n == null ? 
0 : n.intValue(); } } public int getVoterNakEventCount(PollNak c) { synchronized (voterNakEventCounters) { MutableInt n = voterNakEventCounters.get(c); return n == null ? 0 : n.intValue(); } } public int getPollEndEventCount(int status) { return pollEndEventCounters[status]; } }
Refactor to bring all of the pollMapLock actions into one class, along with the3Polls and TheRecentPolls. git-svn-id: 293778eaa97c8c94097d610b1bd5133a8f478f36@18975 4f837ed2-42f5-46e7-a7a5-fa17313484d4
src/org/lockss/poller/PollManager.java
Refactor to bring all of the pollMapLock actions into one class, along with the3Polls and TheRecentPolls.
Java
mit
b8314110507f95ab775fa1c6c8e08d19103a4919
0
lemmy/tlaplus,lemmy/tlaplus,tlaplus/tlaplus,tlaplus/tlaplus,tlaplus/tlaplus,tlaplus/tlaplus,lemmy/tlaplus,lemmy/tlaplus
/******************************************************************************* * Copyright (c) 2020 Microsoft Research. All rights reserved. * * The MIT License (MIT) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
* * Contributors: * Markus Alexander Kuppe - initial API and implementation ******************************************************************************/ package tlc2.tool.impl; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import tla2sany.semantic.ASTConstants; import tla2sany.semantic.OpApplNode; import tla2sany.semantic.SemanticNode; import tlc2.debug.IDebugTarget; import tlc2.tool.Action; import tlc2.tool.EvalControl; import tlc2.tool.IActionItemList; import tlc2.tool.INextStateFunctor; import tlc2.tool.IStateFunctor; import tlc2.tool.TLCState; import tlc2.tool.coverage.CostModel; import tlc2.util.Context; import tlc2.value.impl.Value; import util.FilenameToStream; @SuppressWarnings("serial") public class DebugTool extends Tool { private static final Set<Integer> KINDS = new HashSet<>( Arrays.asList(ASTConstants.NumeralKind, ASTConstants.DecimalKind, ASTConstants.StringKind)); private final IDebugTarget target; public DebugTool(String mainFile, String configFile, FilenameToStream resolver, IDebugTarget target) { super(mainFile, configFile, resolver); this.target = target; } @Override public final Value eval(final SemanticNode expr, final Context c, final TLCState s0, final TLCState s1, final int control, final CostModel cm) { return evalImpl(expr, c, s0, s1, control, cm); } @Override protected Value evalImpl(final SemanticNode expr, final Context c, final TLCState s0, final TLCState s1, final int control, CostModel cm) { if (EvalControl.isDebug(control)) { // Skip debugging when evaluation was triggered by the debugger itself. For // example, when LazyValues get unlazied. return super.evalImpl(expr, c, s0, s1, control, cm); } if (KINDS.contains(expr.getKind())) { // These nodes don't seem interesting to users. They are leaves and we don't // care to see how TLC figures out that then token 1 evaluates to the IntValue 1. 
return super.evalImpl(expr, c, s0, s1, control, cm); } if (expr.getChildren() == null || expr.getChildren().length == 0) { // Skips N and Nat in: // CONSTANT N // ASSUME N \in Nat // or the S, the f, and the 1..3 of: // LET FS == INSTANCE FiniteSets // Perms(S, a, b) == // { f \in [S -> S] : // /\ S = { f[x] : x \in DOMAIN f } // /\ \E n, m \in DOMAIN f: /\ f[n] = a // /\ f[m] = b // /\ n - m \in {1, -1} // } // IN FS!Cardinality(Perms(1..3, 1, 2)) = 4 return super.evalImpl(expr, c, s0, s1, control, cm); } // if (c.isEmpty()) { // // It is tempting to ignore also frames with an empty Context. However, ASSUMES // // for example don't have a Context. Perhaps, we should track the level here and // // ignore frames with empty Context for level greater than zero (or whatever the // // base-level is). // return super.evalImpl(expr, c, s0, s1, control, cm); // } // target is null during instantiation of super, ie. eager evaluation of // operators in SpecProcessor. if (target != null) target.pushFrame(this, expr, c, s0, s1); final Value v = super.evalImpl(expr, c, s0, s1, control, cm); if (target != null) target.popFrame(this, expr, c, s0, s1); return v; } @Override protected final Value evalAppl(final OpApplNode expr, final Context c, final TLCState s0, final TLCState s1, final int control, final CostModel cm) { return evalApplImpl(expr, c, s0, s1, control, cm); } @Override protected final Value setSource(final SemanticNode expr, final Value value) { value.setSource(expr); return value; } @Override public final TLCState enabled(final SemanticNode pred, final IActionItemList acts, final Context c, final TLCState s0, final TLCState s1, final CostModel cm) { return enabledImpl(pred, (ActionItemList) acts, c, s0, s1, cm); } @Override protected final TLCState enabledAppl(final OpApplNode pred, final ActionItemList acts, final Context c, final TLCState s0, final TLCState s1, final CostModel cm) { return enabledApplImpl(pred, acts, c, s0, s1, cm); } @Override protected final 
TLCState enabledUnchanged(final SemanticNode expr, final ActionItemList acts, final Context c, final TLCState s0, final TLCState s1, final CostModel cm) { return enabledUnchangedImpl(expr, acts, c, s0, s1, cm); } @Override protected final TLCState getNextStates(final Action action, final SemanticNode pred, final ActionItemList acts, final Context c, final TLCState s0, final TLCState s1, final INextStateFunctor nss, final CostModel cm) { return getNextStatesImpl(action, pred, acts, c, s0, s1, nss, cm); } @Override protected final TLCState getNextStatesAppl(final Action action, final OpApplNode pred, final ActionItemList acts, final Context c, final TLCState s0, final TLCState s1, final INextStateFunctor nss, final CostModel cm) { target.pushFrame(this, pred, c, s0, s1); TLCState s = getNextStatesApplImpl(action, pred, acts, c, s0, s1, nss, cm); target.popFrame(this, pred, c, s0, s1); return s; } @Override protected final TLCState processUnchanged(final Action action, final SemanticNode expr, final ActionItemList acts, final Context c, final TLCState s0, final TLCState s1, final INextStateFunctor nss, final CostModel cm) { return processUnchangedImpl(action, expr, acts, c, s0, s1, nss, cm); } @Override protected void getInitStates(SemanticNode init, ActionItemList acts, Context c, TLCState ps, IStateFunctor states, CostModel cm) { if (states instanceof WrapperStateFunctor) { // Wrap the IStateFunctor so we can intercept Tool adding a new state to the // functor. Without it, the debugger wouldn't show the fully assigned state and // the variable that is assigned last will always be null. 
super.getInitStates(init, acts, c, ps, states, cm); } else { super.getInitStates(init, acts, c, ps, new WrapperStateFunctor(states, target), cm); } } @Override protected void getInitStatesAppl(OpApplNode init, ActionItemList acts, Context c, TLCState ps, IStateFunctor states, CostModel cm) { target.pushFrame(this, init, c, ps); super.getInitStatesAppl(init, acts, c, ps, states, cm); target.popFrame(this, init, c, ps); } @Override public boolean getNextStates(final INextStateFunctor functor, final TLCState state) { if (functor instanceof WrapperNextStateFunctor) { return super.getNextStates(functor, state); } else { return super.getNextStates(new WrapperNextStateFunctor(functor, target), state); } } private static class WrapperStateFunctor implements IStateFunctor { protected final IStateFunctor functor; protected final IDebugTarget target; WrapperStateFunctor(IStateFunctor functor, IDebugTarget target) { this.functor = functor; this.target = target; } @Override public Object addElement(TLCState state) { target.pushFrame(state); Object addElement = functor.addElement(state); target.popFrame(state); return addElement; } } private static class WrapperNextStateFunctor extends WrapperStateFunctor implements INextStateFunctor { WrapperNextStateFunctor(INextStateFunctor functor, IDebugTarget target) { super(functor, target); } @Override public Object addElement(TLCState predecessor, Action a, TLCState state) { target.pushFrame(predecessor, state); Object addElement = ((INextStateFunctor) functor).addElement(predecessor, a, state); target.popFrame(predecessor, state); return addElement; } } }
tlatools/org.lamport.tlatools/src/tlc2/tool/impl/DebugTool.java
/******************************************************************************* * Copyright (c) 2020 Microsoft Research. All rights reserved. * * The MIT License (MIT) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
* * Contributors: * Markus Alexander Kuppe - initial API and implementation ******************************************************************************/ package tlc2.tool.impl; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import tla2sany.semantic.ASTConstants; import tla2sany.semantic.OpApplNode; import tla2sany.semantic.SemanticNode; import tlc2.debug.IDebugTarget; import tlc2.tool.Action; import tlc2.tool.EvalControl; import tlc2.tool.IActionItemList; import tlc2.tool.INextStateFunctor; import tlc2.tool.IStateFunctor; import tlc2.tool.TLCState; import tlc2.tool.coverage.CostModel; import tlc2.util.Context; import tlc2.value.impl.Value; import util.FilenameToStream; @SuppressWarnings("serial") public class DebugTool extends Tool { private static final Set<Integer> KINDS = new HashSet<>( Arrays.asList(ASTConstants.NumeralKind, ASTConstants.DecimalKind, ASTConstants.StringKind)); private final IDebugTarget target; public DebugTool(String mainFile, String configFile, FilenameToStream resolver, IDebugTarget target) { super(mainFile, configFile, resolver); this.target = target; } @Override public final Value eval(final SemanticNode expr, final Context c, final TLCState s0, final TLCState s1, final int control, final CostModel cm) { return evalImpl(expr, c, s0, s1, control, cm); } @Override protected Value evalImpl(final SemanticNode expr, final Context c, final TLCState s0, final TLCState s1, final int control, CostModel cm) { if (EvalControl.isDebug(control)) { // Skip debugging when evaluation was triggered by the debugger itself. For // example, when LazyValues get unlazied. return super.evalImpl(expr, c, s0, s1, control, cm); } if (KINDS.contains(expr.getKind())) { // These nodes don't seem interesting to users. They are leaves and we don't // care to see how TLC figures out that then token 1 evaluates to the IntValue 1. 
return super.evalImpl(expr, c, s0, s1, control, cm); } if (expr.getChildren() == null || expr.getChildren().length == 0) { // Skips N and Nat in: // CONSTANT N // ASSUME N \in Nat // or the S, the f, and the 1..3 of: // LET FS == INSTANCE FiniteSets // Perms(S, a, b) == // { f \in [S -> S] : // /\ S = { f[x] : x \in DOMAIN f } // /\ \E n, m \in DOMAIN f: /\ f[n] = a // /\ f[m] = b // /\ n - m \in {1, -1} // } // IN FS!Cardinality(Perms(1..3, 1, 2)) = 4 return super.evalImpl(expr, c, s0, s1, control, cm); } // if (c.isEmpty()) { // // It is tempting to ignore also frames with an empty Context. However, ASSUMES // // for example don't have a Context. Perhaps, we should track the level here and // // ignore frames with empty Context for level greater than zero (or whatever the // // base-level is). // return super.evalImpl(expr, c, s0, s1, control, cm); // } target.pushFrame(this, expr, c, s0, s1); final Value v = super.evalImpl(expr, c, s0, s1, control, cm); target.popFrame(this, expr, c, s0, s1); return v; } @Override protected final Value evalAppl(final OpApplNode expr, final Context c, final TLCState s0, final TLCState s1, final int control, final CostModel cm) { return evalApplImpl(expr, c, s0, s1, control, cm); } @Override protected final Value setSource(final SemanticNode expr, final Value value) { value.setSource(expr); return value; } @Override public final TLCState enabled(final SemanticNode pred, final IActionItemList acts, final Context c, final TLCState s0, final TLCState s1, final CostModel cm) { return enabledImpl(pred, (ActionItemList) acts, c, s0, s1, cm); } @Override protected final TLCState enabledAppl(final OpApplNode pred, final ActionItemList acts, final Context c, final TLCState s0, final TLCState s1, final CostModel cm) { return enabledApplImpl(pred, acts, c, s0, s1, cm); } @Override protected final TLCState enabledUnchanged(final SemanticNode expr, final ActionItemList acts, final Context c, final TLCState s0, final TLCState s1, final 
CostModel cm) { return enabledUnchangedImpl(expr, acts, c, s0, s1, cm); } @Override protected final TLCState getNextStates(final Action action, final SemanticNode pred, final ActionItemList acts, final Context c, final TLCState s0, final TLCState s1, final INextStateFunctor nss, final CostModel cm) { return getNextStatesImpl(action, pred, acts, c, s0, s1, nss, cm); } @Override protected final TLCState getNextStatesAppl(final Action action, final OpApplNode pred, final ActionItemList acts, final Context c, final TLCState s0, final TLCState s1, final INextStateFunctor nss, final CostModel cm) { target.pushFrame(this, pred, c, s0, s1); TLCState s = getNextStatesApplImpl(action, pred, acts, c, s0, s1, nss, cm); target.popFrame(this, pred, c, s0, s1); return s; } @Override protected final TLCState processUnchanged(final Action action, final SemanticNode expr, final ActionItemList acts, final Context c, final TLCState s0, final TLCState s1, final INextStateFunctor nss, final CostModel cm) { return processUnchangedImpl(action, expr, acts, c, s0, s1, nss, cm); } @Override protected void getInitStates(SemanticNode init, ActionItemList acts, Context c, TLCState ps, IStateFunctor states, CostModel cm) { if (states instanceof WrapperStateFunctor) { // Wrap the IStateFunctor so we can intercept Tool adding a new state to the // functor. Without it, the debugger wouldn't show the fully assigned state and // the variable that is assigned last will always be null. 
super.getInitStates(init, acts, c, ps, states, cm); } else { super.getInitStates(init, acts, c, ps, new WrapperStateFunctor(states, target), cm); } } @Override protected void getInitStatesAppl(OpApplNode init, ActionItemList acts, Context c, TLCState ps, IStateFunctor states, CostModel cm) { target.pushFrame(this, init, c, ps); super.getInitStatesAppl(init, acts, c, ps, states, cm); target.popFrame(this, init, c, ps); } @Override public boolean getNextStates(final INextStateFunctor functor, final TLCState state) { if (functor instanceof WrapperNextStateFunctor) { return super.getNextStates(functor, state); } else { return super.getNextStates(new WrapperNextStateFunctor(functor, target), state); } } private static class WrapperStateFunctor implements IStateFunctor { protected final IStateFunctor functor; protected final IDebugTarget target; WrapperStateFunctor(IStateFunctor functor, IDebugTarget target) { this.functor = functor; this.target = target; } @Override public Object addElement(TLCState state) { target.pushFrame(state); Object addElement = functor.addElement(state); target.popFrame(state); return addElement; } } private static class WrapperNextStateFunctor extends WrapperStateFunctor implements INextStateFunctor { WrapperNextStateFunctor(INextStateFunctor functor, IDebugTarget target) { super(functor, target); } @Override public Object addElement(TLCState predecessor, Action a, TLCState state) { target.pushFrame(predecessor, state); Object addElement = ((INextStateFunctor) functor).addElement(predecessor, a, state); target.popFrame(predecessor, state); return addElement; } } }
target is null during super call in constructor, which triggers eager evaluation in SpecProcessor. [Bug][Debugger]
tlatools/org.lamport.tlatools/src/tlc2/tool/impl/DebugTool.java
target is null during super call in constructor, which triggers eager evaluation in SpecProcessor.
Java
mit
d35bd4bfaee34bed33e55717e9e6c16c49bc1758
0
WorldCretornica/PlotMe-AbstractGenerator
package com.worldcretornica.plotme_abstractgenerator.bukkit; import com.worldcretornica.plotme_abstractgenerator.AbstractGenerator; import com.worldcretornica.plotme_abstractgenerator.WorldGenConfig; import com.worldcretornica.plotme_core.bukkit.api.IBukkitPlotMe_GeneratorManager; import org.bukkit.*; import org.bukkit.block.Biome; import org.bukkit.block.Block; import org.bukkit.block.BlockFace; import org.bukkit.entity.*; import org.bukkit.inventory.ItemStack; import java.util.*; import java.util.logging.Level; import static com.worldcretornica.plotme_abstractgenerator.AbstractWorldConfigPath.GROUND_LEVEL; import static com.worldcretornica.plotme_abstractgenerator.AbstractWorldConfigPath.PLOT_SIZE; public abstract class BukkitAbstractGenManager implements IBukkitPlotMe_GeneratorManager { // List of blocks that should be placed last in world generation private static final Collection<Integer> blockPlacedLast = new HashSet<>(); private final AbstractGenerator plugin; private final Map<String, WorldGenConfig> worldConfigs; @SuppressWarnings("deprecation") public BukkitAbstractGenManager(AbstractGenerator instance) { plugin = instance; worldConfigs = new HashMap<>(); blockPlacedLast.add(Material.SAPLING.getId()); blockPlacedLast.add(Material.BED.getId()); blockPlacedLast.add(Material.POWERED_RAIL.getId()); blockPlacedLast.add(Material.DETECTOR_RAIL.getId()); blockPlacedLast.add(Material.LONG_GRASS.getId()); blockPlacedLast.add(Material.DEAD_BUSH.getId()); blockPlacedLast.add(Material.PISTON_EXTENSION.getId()); blockPlacedLast.add(Material.YELLOW_FLOWER.getId()); blockPlacedLast.add(Material.RED_ROSE.getId()); blockPlacedLast.add(Material.BROWN_MUSHROOM.getId()); blockPlacedLast.add(Material.RED_MUSHROOM.getId()); blockPlacedLast.add(Material.TORCH.getId()); blockPlacedLast.add(Material.FIRE.getId()); blockPlacedLast.add(Material.REDSTONE_WIRE.getId()); blockPlacedLast.add(Material.CROPS.getId()); blockPlacedLast.add(Material.LADDER.getId()); 
blockPlacedLast.add(Material.RAILS.getId()); blockPlacedLast.add(Material.LEVER.getId()); blockPlacedLast.add(Material.STONE_PLATE.getId()); blockPlacedLast.add(Material.WOOD_PLATE.getId()); blockPlacedLast.add(Material.REDSTONE_TORCH_OFF.getId()); blockPlacedLast.add(Material.REDSTONE_TORCH_ON.getId()); blockPlacedLast.add(Material.STONE_BUTTON.getId()); blockPlacedLast.add(Material.SNOW.getId()); blockPlacedLast.add(Material.PORTAL.getId()); blockPlacedLast.add(Material.DIODE_BLOCK_OFF.getId()); blockPlacedLast.add(Material.DIODE_BLOCK_ON.getId()); blockPlacedLast.add(Material.TRAP_DOOR.getId()); blockPlacedLast.add(Material.VINE.getId()); blockPlacedLast.add(Material.WATER_LILY.getId()); blockPlacedLast.add(Material.NETHER_WARTS.getId()); blockPlacedLast.add(Material.PISTON_BASE.getId()); blockPlacedLast.add(Material.PISTON_STICKY_BASE.getId()); blockPlacedLast.add(Material.PISTON_EXTENSION.getId()); blockPlacedLast.add(Material.PISTON_MOVING_PIECE.getId()); blockPlacedLast.add(Material.COCOA.getId()); blockPlacedLast.add(Material.TRIPWIRE_HOOK.getId()); blockPlacedLast.add(Material.TRIPWIRE.getId()); blockPlacedLast.add(Material.FLOWER_POT.getId()); blockPlacedLast.add(Material.CARROT.getId()); blockPlacedLast.add(Material.POTATO.getId()); blockPlacedLast.add(Material.WOOD_BUTTON.getId()); blockPlacedLast.add(Material.SKULL.getId()); blockPlacedLast.add(Material.GOLD_PLATE.getId()); blockPlacedLast.add(Material.IRON_PLATE.getId()); blockPlacedLast.add(Material.REDSTONE_COMPARATOR_OFF.getId()); blockPlacedLast.add(Material.REDSTONE_COMPARATOR_ON.getId()); blockPlacedLast.add(Material.ACTIVATOR_RAIL.getId()); } public static void clearEntities(Location bottom, Location top) { int bottomX = bottom.getBlockX(); int topX = top.getBlockX(); int bottomZ = bottom.getBlockZ(); int topZ = top.getBlockZ(); World world = bottom.getWorld(); int minChunkX = (int) Math.floor(bottomX / 16); int maxChunkX = (int) Math.floor(topX / 16); int minChunkZ = (int) Math.floor(bottomZ / 
16); int maxChunkZ = (int) Math.floor(topZ / 16); for (int cx = minChunkX; cx <= maxChunkX; cx++) { for (int cz = minChunkZ; cz <= maxChunkZ; cz++) { Chunk chunk = world.getChunkAt(cx, cz); for (Entity entity : chunk.getEntities()) { Location location = entity.getLocation(); if (!(entity instanceof Player) && location.getBlockX() >= bottom.getBlockX() && location.getBlockX() <= top.getBlockX() && location.getBlockZ() >= bottom.getBlockZ() && location.getBlockZ() <= top.getBlockZ()) { entity.remove(); } } } } } private static int[] getPaintingMod(Art a, BlockFace bf) { int H = a.getBlockHeight(); int W = a.getBlockWidth(); //Same for all faces if (H == 2 && W == 1) { return new int[]{0, -1, 0}; } switch (bf) { case WEST: if (H == 3 && W == 4 || H == 1 && W == 2) { return new int[]{0, 0, -1}; } else if (H == 2 && W == 2 || H == 4 && W == 4 || H == 2 && W == 4) { return new int[]{0, -1, -1}; } break; case SOUTH: if (H == 3 && W == 4 || H == 1 && W == 2) { return new int[]{-1, 0, 0}; } else if (H == 2 && W == 2 || H == 4 && W == 4 || H == 2 && W == 4) { return new int[]{-1, -1, 0}; } break; case EAST: if (H == 2 && W == 2 || H == 4 && W == 4 || H == 2 && W == 4) { return new int[]{0, -1, 0}; } break; case NORTH: if (H == 2 && W == 2 || H == 4 && W == 4 || H == 2 && W == 4) { return new int[]{0, -1, 0}; } break; default: return new int[]{0, 0, 0}; } return new int[]{0, 0, 0}; } public WorldGenConfig getWGC(World world) { return getWGC(world.getName()); } public WorldGenConfig getWGC(String world) { return worldConfigs.get(world.toLowerCase()); } public WorldGenConfig putWGC(String worldname, WorldGenConfig wgc) { return worldConfigs.put(worldname.toLowerCase(), wgc); } public boolean containsWGC(World world) { return containsWGC(world.getName()); } public boolean containsWGC(String worldname) { return worldConfigs.containsKey(worldname.toLowerCase()); } @Override public int getPlotSize(String worldname) { if (getWGC(worldname) != null) { return 
getWGC(worldname).getInt(PLOT_SIZE); } else { plugin.getLogger().log(Level.WARNING, "Tried to get plot size for undefined world '{0}'", worldname); return 0; } } @Override public boolean createConfig(String worldname, Map<String, String> args) { WorldGenConfig wgc = plugin.getWorldGenConfig(worldname); for (String key : args.keySet()) { wgc.set(key, args.get(key)); } return true; } @Override public Map<String, String> getDefaultGenerationConfig() { // TODO: Either change interface or change WGC //return WorldGenConfig.cloneDefaults(); throw new UnsupportedOperationException("Not supported yet. Either change interface or change WGC."); } @Override public int getRoadHeight(String worldname) { if (containsWGC(worldname)) { return getWGC(worldname).getInt(GROUND_LEVEL); } else { plugin.getLogger().log(Level.WARNING, "Tried to get road height for undefined world '{0}'", worldname); return 64; } } @Override public String getPlotId(Player player) { return getPlotId(player.getLocation()); } @Override public List<Player> getPlayersInPlot(String id) { List<Player> playersInPlot = new ArrayList<>(); for (Player p : Bukkit.getOnlinePlayers()) { if (getPlotId(p).equals(id)) { playersInPlot.add(p); } } return playersInPlot; } @Override public void setBiome(World world, String id, Biome biome) { int bottomX = bottomX(id, world) - 1; int topX = topX(id, world) + 1; int bottomZ = bottomZ(id, world) - 1; int topZ = topZ(id, world) + 1; for (int x = bottomX; x <= topX; x++) { for (int z = bottomZ; z <= topZ; z++) { world.getBlockAt(x, 0, z).setBiome(biome); } } refreshPlotChunks(world, id); } @Override public void refreshPlotChunks(World world, String id) { int bottomX = bottomX(id, world); int topX = topX(id, world); int bottomZ = bottomZ(id, world); int topZ = topZ(id, world); int minChunkX = (int) Math.floor(bottomX / 16); int maxChunkX = (int) Math.floor(topX / 16); int minChunkZ = (int) Math.floor(bottomZ / 16); int maxChunkZ = (int) Math.floor(topZ / 16); for (int x = 
minChunkX; x <= maxChunkX; x++) { for (int z = minChunkZ; z <= maxChunkZ; z++) { world.refreshChunk(x, z); } } } @Override public Location getTop(World world, String id) { return getPlotTopLoc(world, id); } @Override public Location getBottom(World world, String id) { return getPlotBottomLoc(world, id); } @Override public void clear(World world, String id) { clear(getBottom(world, id), getTop(world, id)); } @Override public Long[] clear(World world, String id, long maxBlocks, Long[] start) { return clear(getBottom(world, id), getTop(world, id), maxBlocks, start); } @Override public boolean isBlockInPlot(String id, Location location) { World world = location.getWorld(); int lowestX = Math.min(bottomX(id, world), topX(id, world)); int highestX = Math.max(bottomX(id, world), topX(id, world)); int lowestZ = Math.min(bottomZ(id, world), topZ(id, world)); int highestZ = Math.max(bottomZ(id, world), topZ(id, world)); return location.getBlockX() >= lowestX && location.getBlockX() <= highestX && location.getBlockZ() >= lowestZ && location.getBlockZ() <= highestZ; } @SuppressWarnings("deprecation") @Override public boolean movePlot(World world, String idFrom, String idTo) { Location plot1Bottom = getPlotBottomLoc(world, idFrom); Location plot2Bottom = getPlotBottomLoc(world, idTo); Location plot1Top = getPlotTopLoc(world, idFrom); Location plot2Top = getPlotTopLoc(world, idTo); int distanceX = plot1Bottom.getBlockX() - plot2Bottom.getBlockX(); int distanceZ = plot1Bottom.getBlockZ() - plot2Bottom.getBlockZ(); Collection<BlockInfo> lastblocks = new HashSet<>(); int bottomX = plot1Bottom.getBlockX(); int topX = plot1Top.getBlockX(); int bottomZ = plot1Bottom.getBlockZ(); int topZ = plot1Top.getBlockZ(); for (int x = bottomX; x <= topX; x++) { for (int z = bottomZ; z <= topZ; z++) { Block plot1Block = world.getBlockAt(x, 0, z); BukkitBlockRepresentation plot1BlockRepresentation = new BukkitBlockRepresentation(plot1Block); Block plot2Block = world.getBlockAt(x - distanceX, 0, z 
- distanceZ); BukkitBlockRepresentation plot2BlockRepresentation = new BukkitBlockRepresentation(plot2Block); plot1Block.setBiome(plot2Block.getBiome()); plot2Block.setBiome(plot1Block.getBiome()); for (int y = 0; y < 256; y++) { plot1Block = world.getBlockAt(x, y, z); plot2Block = world.getBlockAt(x - distanceX, y, z - distanceZ); if (!blockPlacedLast.contains((int) plot2BlockRepresentation.getId())) { plot2BlockRepresentation.setBlock(plot1Block, false); } else { plot1Block.setType(Material.AIR); lastblocks.add(new BlockInfo(plot2BlockRepresentation, world, x, y, z)); } if (!blockPlacedLast.contains((int) plot1BlockRepresentation.getId())) { plot1BlockRepresentation.setBlock(plot2Block, false); } else { plot2Block.setType(Material.AIR); lastblocks.add(new BlockInfo(plot1BlockRepresentation, world, x - distanceX, y, z - distanceZ)); } } } } for (BlockInfo bi : lastblocks) { Block block = bi.loc.getBlock(); bi.block.setBlock(block, false); } lastblocks.clear(); //Move entities int minChunkX1 = (int) Math.floor(bottomX / 16); int maxChunkX1 = (int) Math.floor(topX / 16); int minChunkZ1 = (int) Math.floor(bottomZ / 16); int maxChunkZ1 = (int) Math.floor(topZ / 16); int minChunkX2 = (int) Math.floor((bottomX - distanceX) / 16); int maxChunkX2 = (int) Math.floor((topX - distanceX) / 16); int minChunkZ2 = (int) Math.floor((bottomZ - distanceZ) / 16); int maxChunkZ2 = (int) Math.floor((topZ - distanceZ) / 16); Collection<Entity> entities1 = new HashSet<>(); Collection<Entity> entities2 = new HashSet<>(); for (int cx = minChunkX1; cx <= maxChunkX1; cx++) { for (int cz = minChunkZ1; cz <= maxChunkZ1; cz++) { Chunk chunk = world.getChunkAt(cx, cz); for (Entity entity : chunk.getEntities()) { Location location = entity.getLocation(); if (!(entity instanceof Player) /*&& !(entity instanceof Hanging)*/ && location.getBlockX() >= plot1Bottom.getBlockX() && location.getBlockX() <= plot1Top.getBlockX() && location.getBlockZ() >= plot1Bottom.getBlockZ() && location.getBlockZ() <= 
plot1Top.getBlockZ()) { entities1.add(entity); } } } } for (int cx = minChunkX2; cx <= maxChunkX2; cx++) { for (int cz = minChunkZ2; cz <= maxChunkZ2; cz++) { Chunk chunk = world.getChunkAt(cx, cz); for (Entity entity : chunk.getEntities()) { Location location = entity.getLocation(); if (!(entity instanceof Player) /*&& !(entity instanceof Hanging)*/ && location.getBlockX() >= plot2Bottom.getBlockX() && location.getBlockX() <= plot2Top.getBlockX() && location.getBlockZ() >= plot2Bottom.getBlockZ() && location.getBlockZ() <= plot2Top.getBlockZ()) { entities2.add(entity); } } } } for (Entity e : entities1) { Location location = e.getLocation(); Location newl = new Location(world, location.getX() - distanceX, location.getY(), location.getZ() - distanceZ); if (e.getType() == EntityType.ITEM_FRAME) { ItemFrame i = ((ItemFrame) e); BlockFace bf = i.getFacing(); ItemStack is = i.getItem(); Rotation rot = i.getRotation(); i.teleport(newl); i.setItem(is); i.setRotation(rot); i.setFacingDirection(bf, true); } else if (e.getType() == EntityType.PAINTING) { Painting p = ((Painting) e); BlockFace bf = p.getFacing(); int[] mod = getPaintingMod(p.getArt(), bf); newl = newl.add(mod[0], mod[1], mod[2]); p.teleport(newl); p.setFacingDirection(bf, true); } else { e.teleport(newl); } } for (Entity entity : entities2) { Location location = entity.getLocation(); Location newl = new Location(world, location.getX() + distanceX, location.getY(), location.getZ() + distanceZ); if (entity.getType() == EntityType.ITEM_FRAME) { ItemFrame i = ((ItemFrame) entity); BlockFace bf = i.getFacing(); ItemStack is = i.getItem(); Rotation rot = i.getRotation(); i.teleport(newl); i.setItem(is); i.setRotation(rot); i.setFacingDirection(bf, true); } else if (entity.getType() == EntityType.PAINTING) { Painting p = ((Painting) entity); BlockFace bf = p.getFacing(); int[] mod = getPaintingMod(p.getArt(), bf); newl = newl.add(mod[0], mod[1], mod[2]); p.teleport(newl); p.setFacingDirection(bf, true); } else { 
entity.teleport(newl); } } return true; } @Override public int bottomX(String id, World world) { return getPlotBottomLoc(world, id).getBlockX(); } @Override public int bottomZ(String id, World world) { return getPlotBottomLoc(world, id).getBlockZ(); } @Override public int topX(String id, World world) { return getPlotTopLoc(world, id).getBlockX(); } @Override public int topZ(String id, World world) { return getPlotTopLoc(world, id).getBlockZ(); } @SuppressWarnings("ResultOfMethodCallIgnored") @Override public boolean isValidId(String id) { String[] coords = id.split(";"); if (coords.length == 2) { try { Integer.parseInt(coords[0]); Integer.parseInt(coords[1]); return true; } catch (NumberFormatException e) { return false; } } else { return false; } } @Override public int getIdX(String id) { return Integer.parseInt(id.substring(0, id.indexOf(";"))); } @Override public int getIdZ(String id) { return Integer.parseInt(id.substring(id.indexOf(";") + 1)); } }
src/main/java/com/worldcretornica/plotme_abstractgenerator/bukkit/BukkitAbstractGenManager.java
package com.worldcretornica.plotme_abstractgenerator.bukkit; import com.worldcretornica.plotme_abstractgenerator.AbstractGenerator; import com.worldcretornica.plotme_abstractgenerator.WorldGenConfig; import com.worldcretornica.plotme_core.bukkit.api.IBukkitPlotMe_GeneratorManager; import org.bukkit.*; import org.bukkit.block.Biome; import org.bukkit.block.Block; import org.bukkit.block.BlockFace; import org.bukkit.entity.*; import org.bukkit.inventory.ItemStack; import java.util.*; import java.util.logging.Level; import static com.worldcretornica.plotme_abstractgenerator.AbstractWorldConfigPath.GROUND_LEVEL; import static com.worldcretornica.plotme_abstractgenerator.AbstractWorldConfigPath.PLOT_SIZE; public abstract class BukkitAbstractGenManager implements IBukkitPlotMe_GeneratorManager { // List of blocks that should be placed last in world generation private static final Collection<Integer> blockPlacedLast = new HashSet<>(); private final AbstractGenerator plugin; private final Map<String, WorldGenConfig> worldConfigs; @SuppressWarnings("deprecation") public BukkitAbstractGenManager(AbstractGenerator instance) { plugin = instance; worldConfigs = new HashMap<>(); blockPlacedLast.add(Material.SAPLING.getId()); blockPlacedLast.add(Material.BED.getId()); blockPlacedLast.add(Material.POWERED_RAIL.getId()); blockPlacedLast.add(Material.DETECTOR_RAIL.getId()); blockPlacedLast.add(Material.LONG_GRASS.getId()); blockPlacedLast.add(Material.DEAD_BUSH.getId()); blockPlacedLast.add(Material.PISTON_EXTENSION.getId()); blockPlacedLast.add(Material.YELLOW_FLOWER.getId()); blockPlacedLast.add(Material.RED_ROSE.getId()); blockPlacedLast.add(Material.BROWN_MUSHROOM.getId()); blockPlacedLast.add(Material.RED_MUSHROOM.getId()); blockPlacedLast.add(Material.TORCH.getId()); blockPlacedLast.add(Material.FIRE.getId()); blockPlacedLast.add(Material.REDSTONE_WIRE.getId()); blockPlacedLast.add(Material.CROPS.getId()); blockPlacedLast.add(Material.LADDER.getId()); 
blockPlacedLast.add(Material.RAILS.getId()); blockPlacedLast.add(Material.LEVER.getId()); blockPlacedLast.add(Material.STONE_PLATE.getId()); blockPlacedLast.add(Material.WOOD_PLATE.getId()); blockPlacedLast.add(Material.REDSTONE_TORCH_OFF.getId()); blockPlacedLast.add(Material.REDSTONE_TORCH_ON.getId()); blockPlacedLast.add(Material.STONE_BUTTON.getId()); blockPlacedLast.add(Material.SNOW.getId()); blockPlacedLast.add(Material.PORTAL.getId()); blockPlacedLast.add(Material.DIODE_BLOCK_OFF.getId()); blockPlacedLast.add(Material.DIODE_BLOCK_ON.getId()); blockPlacedLast.add(Material.TRAP_DOOR.getId()); blockPlacedLast.add(Material.VINE.getId()); blockPlacedLast.add(Material.WATER_LILY.getId()); blockPlacedLast.add(Material.NETHER_WARTS.getId()); blockPlacedLast.add(Material.PISTON_BASE.getId()); blockPlacedLast.add(Material.PISTON_STICKY_BASE.getId()); blockPlacedLast.add(Material.PISTON_EXTENSION.getId()); blockPlacedLast.add(Material.PISTON_MOVING_PIECE.getId()); blockPlacedLast.add(Material.COCOA.getId()); blockPlacedLast.add(Material.TRIPWIRE_HOOK.getId()); blockPlacedLast.add(Material.TRIPWIRE.getId()); blockPlacedLast.add(Material.FLOWER_POT.getId()); blockPlacedLast.add(Material.CARROT.getId()); blockPlacedLast.add(Material.POTATO.getId()); blockPlacedLast.add(Material.WOOD_BUTTON.getId()); blockPlacedLast.add(Material.SKULL.getId()); blockPlacedLast.add(Material.GOLD_PLATE.getId()); blockPlacedLast.add(Material.IRON_PLATE.getId()); blockPlacedLast.add(Material.REDSTONE_COMPARATOR_OFF.getId()); blockPlacedLast.add(Material.REDSTONE_COMPARATOR_ON.getId()); blockPlacedLast.add(Material.ACTIVATOR_RAIL.getId()); } public static void clearEntities(Location bottom, Location top) { int bottomX = bottom.getBlockX(); int topX = top.getBlockX(); int bottomZ = bottom.getBlockZ(); int topZ = top.getBlockZ(); World world = bottom.getWorld(); int minChunkX = (int) Math.floor(bottomX / 16); int maxChunkX = (int) Math.floor(topX / 16); int minChunkZ = (int) Math.floor(bottomZ / 
16); int maxChunkZ = (int) Math.floor(topZ / 16); for (int cx = minChunkX; cx <= maxChunkX; cx++) { for (int cz = minChunkZ; cz <= maxChunkZ; cz++) { Chunk chunk = world.getChunkAt(cx, cz); for (Entity entity : chunk.getEntities()) { Location location = entity.getLocation(); if (!(entity instanceof Player) && location.getBlockX() >= bottom.getBlockX() && location.getBlockX() <= top.getBlockX() && location.getBlockZ() >= bottom.getBlockZ() && location.getBlockZ() <= top.getBlockZ()) { entity.remove(); } } } } } private static int[] getPaintingMod(Art a, BlockFace bf) { int H = a.getBlockHeight(); int W = a.getBlockWidth(); //Same for all faces if (H == 2 && W == 1) { return new int[]{0, -1, 0}; } switch (bf) { case WEST: if (H == 3 && W == 4 || H == 1 && W == 2) { return new int[]{0, 0, -1}; } else if (H == 2 && W == 2 || H == 4 && W == 4 || H == 2 && W == 4) { return new int[]{0, -1, -1}; } break; case SOUTH: if (H == 3 && W == 4 || H == 1 && W == 2) { return new int[]{-1, 0, 0}; } else if (H == 2 && W == 2 || H == 4 && W == 4 || H == 2 && W == 4) { return new int[]{-1, -1, 0}; } break; case EAST: if (H == 2 && W == 2 || H == 4 && W == 4 || H == 2 && W == 4) { return new int[]{0, -1, 0}; } break; case NORTH: if (H == 2 && W == 2 || H == 4 && W == 4 || H == 2 && W == 4) { return new int[]{0, -1, 0}; } break; default: return new int[]{0, 0, 0}; } return new int[]{0, 0, 0}; } public WorldGenConfig getWGC(World world) { return getWGC(world.getName()); } public WorldGenConfig getWGC(String world) { return worldConfigs.get(world.toLowerCase()); } public WorldGenConfig putWGC(String worldname, WorldGenConfig wgc) { return worldConfigs.put(worldname.toLowerCase(), wgc); } public boolean containsWGC(World world) { return containsWGC(world.getName()); } public boolean containsWGC(String worldname) { return worldConfigs.containsKey(worldname.toLowerCase()); } @Override public int getPlotSize(String worldname) { if (getWGC(worldname) != null) { return 
getWGC(worldname).getInt(PLOT_SIZE); } else { plugin.getLogger().log(Level.WARNING, "Tried to get plot size for undefined world '{0}'", worldname); return 0; } } @Override public boolean createConfig(String worldname, Map<String, String> args) { WorldGenConfig wgc = plugin.getWorldGenConfig(worldname); for (String key : args.keySet()) { wgc.set(key, args.get(key)); } return true; } @Override public Map<String, String> getDefaultGenerationConfig() { // TODO: Either change interface or change WGC //return WorldGenConfig.cloneDefaults(); throw new UnsupportedOperationException("Not supported yet. Either change interface or change WGC."); } @Override public int getRoadHeight(String worldname) { if (containsWGC(worldname)) { return getWGC(worldname).getInt(GROUND_LEVEL); } else { plugin.getLogger().log(Level.WARNING, "Tried to get road height for undefined world '{0}'", worldname); return 64; } } @Override public String getPlotId(Player player) { return getPlotId(player.getLocation()); } @Override public List<Player> getPlayersInPlot(String id) { List<Player> playersInPlot = new ArrayList<>(); for (Player p : Bukkit.getOnlinePlayers()) { if (getPlotId(p).equals(id)) { playersInPlot.add(p); } } return playersInPlot; } @Override public void setBiome(World world, String id, Biome biome) { int bottomX = bottomX(id, world) - 1; int topX = topX(id, world) + 1; int bottomZ = bottomZ(id, world) - 1; int topZ = topZ(id, world) + 1; for (int x = bottomX; x <= topX; x++) { for (int z = bottomZ; z <= topZ; z++) { world.getBlockAt(x, 0, z).setBiome(biome); } } refreshPlotChunks(world, id); } @Override public void refreshPlotChunks(World world, String id) { int bottomX = bottomX(id, world); int topX = topX(id, world); int bottomZ = bottomZ(id, world); int topZ = topZ(id, world); int minChunkX = (int) Math.floor((double) bottomX / 16); int maxChunkX = (int) Math.floor((double) topX / 16); int minChunkZ = (int) Math.floor((double) bottomZ / 16); int maxChunkZ = (int) 
Math.floor((double) topZ / 16); for (int x = minChunkX; x <= maxChunkX; x++) { for (int z = minChunkZ; z <= maxChunkZ; z++) { world.refreshChunk(x, z); } } } @Override public Location getTop(World world, String id) { return getPlotTopLoc(world, id); } @Override public Location getBottom(World world, String id) { return getPlotBottomLoc(world, id); } @Override public void clear(World world, String id) { clear(getBottom(world, id), getTop(world, id)); } @Override public Long[] clear(World world, String id, long maxBlocks, Long[] start) { return clear(getBottom(world, id), getTop(world, id), maxBlocks, start); } @Override public boolean isBlockInPlot(String id, Location location) { World world = location.getWorld(); int lowestX = Math.min(bottomX(id, world), topX(id, world)); int highestX = Math.max(bottomX(id, world), topX(id, world)); int lowestZ = Math.min(bottomZ(id, world), topZ(id, world)); int highestZ = Math.max(bottomZ(id, world), topZ(id, world)); return location.getBlockX() >= lowestX && location.getBlockX() <= highestX && location.getBlockZ() >= lowestZ && location.getBlockZ() <= highestZ; } @SuppressWarnings("deprecation") @Override public boolean movePlot(World world, String idFrom, String idTo) { Location plot1Bottom = getPlotBottomLoc(world, idFrom); Location plot2Bottom = getPlotBottomLoc(world, idTo); Location plot1Top = getPlotTopLoc(world, idFrom); Location plot2Top = getPlotTopLoc(world, idTo); int distanceX = plot1Bottom.getBlockX() - plot2Bottom.getBlockX(); int distanceZ = plot1Bottom.getBlockZ() - plot2Bottom.getBlockZ(); Collection<BlockInfo> lastblocks = new HashSet<>(); int bottomX = plot1Bottom.getBlockX(); int topX = plot1Top.getBlockX(); int bottomZ = plot1Bottom.getBlockZ(); int topZ = plot1Top.getBlockZ(); for (int x = bottomX; x <= topX; x++) { for (int z = bottomZ; z <= topZ; z++) { Block plot1Block = world.getBlockAt(x, 0, z); BukkitBlockRepresentation plot1BlockRepresentation = new BukkitBlockRepresentation(plot1Block); Block 
plot2Block = world.getBlockAt(x - distanceX, 0, z - distanceZ); BukkitBlockRepresentation plot2BlockRepresentation = new BukkitBlockRepresentation(plot2Block); plot1Block.setBiome(plot2Block.getBiome()); plot2Block.setBiome(plot1Block.getBiome()); for (int y = 0; y < 256; y++) { plot1Block = world.getBlockAt(x, y, z); plot2Block = world.getBlockAt(x - distanceX, y, z - distanceZ); if (!blockPlacedLast.contains((int) plot2BlockRepresentation.getId())) { plot2BlockRepresentation.setBlock(plot1Block, false); } else { plot1Block.setType(Material.AIR); lastblocks.add(new BlockInfo(plot2BlockRepresentation, world, x, y, z)); } if (!blockPlacedLast.contains((int) plot1BlockRepresentation.getId())) { plot1BlockRepresentation.setBlock(plot2Block, false); } else { plot2Block.setType(Material.AIR); lastblocks.add(new BlockInfo(plot1BlockRepresentation, world, x - distanceX, y, z - distanceZ)); } } } } for (BlockInfo bi : lastblocks) { Block block = bi.loc.getBlock(); bi.block.setBlock(block, false); } lastblocks.clear(); //Move entities int minChunkX1 = (int) Math.floor((double) bottomX / 16); int maxChunkX1 = (int) Math.floor((double) topX / 16); int minChunkZ1 = (int) Math.floor((double) bottomZ / 16); int maxChunkZ1 = (int) Math.floor((double) topZ / 16); int minChunkX2 = (int) Math.floor((double) (bottomX - distanceX) / 16); int maxChunkX2 = (int) Math.floor((double) (topX - distanceX) / 16); int minChunkZ2 = (int) Math.floor((double) (bottomZ - distanceZ) / 16); int maxChunkZ2 = (int) Math.floor((double) (topZ - distanceZ) / 16); Collection<Entity> entities1 = new HashSet<>(); Collection<Entity> entities2 = new HashSet<>(); for (int cx = minChunkX1; cx <= maxChunkX1; cx++) { for (int cz = minChunkZ1; cz <= maxChunkZ1; cz++) { Chunk chunk = world.getChunkAt(cx, cz); for (Entity entity : chunk.getEntities()) { Location location = entity.getLocation(); if (!(entity instanceof Player) /*&& !(entity instanceof Hanging)*/ && location.getBlockX() >= plot1Bottom.getBlockX() && 
location.getBlockX() <= plot1Top.getBlockX() && location.getBlockZ() >= plot1Bottom.getBlockZ() && location.getBlockZ() <= plot1Top.getBlockZ()) { entities1.add(entity); } } } } for (int cx = minChunkX2; cx <= maxChunkX2; cx++) { for (int cz = minChunkZ2; cz <= maxChunkZ2; cz++) { Chunk chunk = world.getChunkAt(cx, cz); for (Entity entity : chunk.getEntities()) { Location location = entity.getLocation(); if (!(entity instanceof Player) /*&& !(entity instanceof Hanging)*/ && location.getBlockX() >= plot2Bottom.getBlockX() && location.getBlockX() <= plot2Top.getBlockX() && location.getBlockZ() >= plot2Bottom.getBlockZ() && location.getBlockZ() <= plot2Top.getBlockZ()) { entities2.add(entity); } } } } for (Entity e : entities1) { Location location = e.getLocation(); Location newl = new Location(world, location.getX() - distanceX, location.getY(), location.getZ() - distanceZ); if (e.getType() == EntityType.ITEM_FRAME) { ItemFrame i = ((ItemFrame) e); BlockFace bf = i.getFacing(); ItemStack is = i.getItem(); Rotation rot = i.getRotation(); i.teleport(newl); i.setItem(is); i.setRotation(rot); i.setFacingDirection(bf, true); } else if (e.getType() == EntityType.PAINTING) { Painting p = ((Painting) e); BlockFace bf = p.getFacing(); int[] mod = getPaintingMod(p.getArt(), bf); newl = newl.add(mod[0], mod[1], mod[2]); p.teleport(newl); p.setFacingDirection(bf, true); } else { e.teleport(newl); } } for (Entity entity : entities2) { Location location = entity.getLocation(); Location newl = new Location(world, location.getX() + distanceX, location.getY(), location.getZ() + distanceZ); if (entity.getType() == EntityType.ITEM_FRAME) { ItemFrame i = ((ItemFrame) entity); BlockFace bf = i.getFacing(); ItemStack is = i.getItem(); Rotation rot = i.getRotation(); i.teleport(newl); i.setItem(is); i.setRotation(rot); i.setFacingDirection(bf, true); } else if (entity.getType() == EntityType.PAINTING) { Painting p = ((Painting) entity); BlockFace bf = p.getFacing(); int[] mod = 
getPaintingMod(p.getArt(), bf); newl = newl.add(mod[0], mod[1], mod[2]); p.teleport(newl); p.setFacingDirection(bf, true); } else { entity.teleport(newl); } } return true; } @Override public int bottomX(String id, World world) { return getPlotBottomLoc(world, id).getBlockX(); } @Override public int bottomZ(String id, World world) { return getPlotBottomLoc(world, id).getBlockZ(); } @Override public int topX(String id, World world) { return getPlotTopLoc(world, id).getBlockX(); } @Override public int topZ(String id, World world) { return getPlotTopLoc(world, id).getBlockZ(); } @SuppressWarnings("ResultOfMethodCallIgnored") @Override public boolean isValidId(String id) { String[] coords = id.split(";"); if (coords.length == 2) { try { Integer.parseInt(coords[0]); Integer.parseInt(coords[1]); return true; } catch (NumberFormatException e) { return false; } } else { return false; } } @Override public int getIdX(String id) { return Integer.parseInt(id.substring(0, id.indexOf(";"))); } @Override public int getIdZ(String id) { return Integer.parseInt(id.substring(id.indexOf(";") + 1)); } }
Math Changes.
src/main/java/com/worldcretornica/plotme_abstractgenerator/bukkit/BukkitAbstractGenManager.java
Math Changes.
Java
mit
5e12772d23d682b438e7aed75b92632238fa631c
0
trendrr/java-oss-lib,MarkG/java-oss-lib
/** * */ package com.trendrr.oss.concurrent; import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * Simple threadsafe lazy init object. * * usage: * * LazyInit<MyObject> obj = new LazyInit<MyObject>() { * @Override * public MyObject init() { * return new MyObject(); * } * } * * * MyObject my = obj.get(); * * @author Dustin Norlander * @created Aug 31, 2011 * */ public abstract class LazyInitObject<T> { protected Log log = LogFactory.getLog(LazyInitObject.class); AtomicReference<T> object; LazyInit lock = new LazyInit(); public abstract T init(); public T get() { if (lock.start()) { try { this.object.set(this.init()); } finally { lock.end(); } } return object.get(); } /** * will reset this object to initialize again (on the next get() method call) */ public void reset() { lock.reset(); } /** * atomically sets the reference and sets the init to not run. * @param object */ public void set(T object) { lock.start(); try { this.object.set(object); } finally { lock.end(); } } }
src/com/trendrr/oss/concurrent/LazyInitObject.java
/** * */ package com.trendrr.oss.concurrent; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * Simple threadsafe lazy init object. * * usage: * * LazyInit<MyObject> obj = new LazyInit<MyObject>() { * @Override * public MyObject init() { * return new MyObject(); * } * } * * * MyObject my = obj.get(); * * @author Dustin Norlander * @created Aug 31, 2011 * */ public abstract class LazyInitObject<T> { protected Log log = LogFactory.getLog(LazyInitObject.class); T object; LazyInit lock = new LazyInit(); public abstract T init(); public T get() { if (lock.start()) { try { this.object = this.init(); } finally { lock.end(); } } return object; } /** * will reset this object to initialize again (on the next get() method call) */ public void reset() { lock.reset(); } }
Add set to lazy init object, to bypass init code if needed
src/com/trendrr/oss/concurrent/LazyInitObject.java
Add set to lazy init object, to bypass init code if needed
Java
mit
b89b09ef173b6d1004cfe7f2865ce530f4acedf9
0
civitaspo/embulk-input-hdfs,civitaspo/embulk-input-hdfs
package org.embulk.input.hdfs; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.apache.hadoop.fs.Path; import org.embulk.EmbulkTestRuntime; import org.embulk.config.ConfigException; import org.embulk.config.ConfigSource; import org.embulk.config.TaskReport; import org.embulk.config.TaskSource; import org.embulk.input.hdfs.HdfsFileInputPlugin.PluginTask; import org.embulk.spi.Exec; import org.embulk.spi.FileInputPlugin; import org.embulk.spi.FileInputRunner; import org.embulk.spi.InputPlugin; import org.embulk.spi.Schema; import org.embulk.spi.TestPageBuilderReader.MockPageOutput; import org.embulk.spi.util.Pages; import org.embulk.standards.CsvParserPlugin; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.slf4j.Logger; import javax.annotation.Nullable; import java.io.File; import java.util.ArrayList; import java.util.List; import static org.junit.Assert.assertEquals; public class TestHdfsFileInputPlugin { @Rule public EmbulkTestRuntime runtime = new EmbulkTestRuntime(); @Rule public ExpectedException exception = ExpectedException.none(); private Logger logger = runtime.getExec().getLogger(TestHdfsFileInputPlugin.class); private HdfsFileInputPlugin plugin; private FileInputRunner runner; private MockPageOutput output; private Path path; @Before public void createResources() { plugin = new HdfsFileInputPlugin(); runner = new FileInputRunner(runtime.getInstance(HdfsFileInputPlugin.class)); output = new MockPageOutput(); path = new Path(new File(getClass().getResource("/sample_01.csv").getPath()).getParent()); } @Test public void testDefaultValues() { ConfigSource config = Exec.newConfigSource() .set("path", path.toString()); PluginTask task = config.loadConfig(PluginTask.class); 
assertEquals(path.toString(), task.getPath()); assertEquals(Lists.newArrayList(), task.getConfigFiles()); assertEquals(Maps.newHashMap(), task.getConfig()); assertEquals(true, task.getPartition()); assertEquals(0, task.getRewindSeconds()); assertEquals(-1, task.getApproximateNumPartitions()); assertEquals(0, task.getSkipHeaderLines()); assertEquals(false, task.getUseCompressionCodec()); } @Test(expected = ConfigException.class) public void testRequiredValues() { ConfigSource config = Exec.newConfigSource(); PluginTask task = config.loadConfig(PluginTask.class); } @Test public void testFileList() { ConfigSource config = getConfigWithDefaultValues(); config.set("num_partitions", 1); plugin.transaction(config, new FileInputPlugin.Control() { @Override public List<TaskReport> run(TaskSource taskSource, int taskCount) { PluginTask task = taskSource.loadTask(PluginTask.class); List<String> fileList = Lists.transform(Lists.newArrayList(new File(path.toString()).list()), new Function<String, String>() { @Nullable @Override public String apply(@Nullable String input) { return new File(path.toString() + "/" + input).toURI().toString(); } }); List<String> resultFList = Lists.transform(task.getFiles(), new Function<HdfsPartialFile, String>() { @Nullable @Override public String apply(@Nullable HdfsPartialFile input) { assert input != null; return input.getPath(); } }); assertEquals(fileList, resultFList); return emptyTaskReports(taskCount); } }); } @Test public void testHdfsFileInputByOpen() { ConfigSource config = getConfigWithDefaultValues(); config.set("num_partitions", 10); config.set("use_compression_codec", true); runner.transaction(config, new Control()); assertRecords(config, output, 12); } @Test public void testHdfsFileInputByOpenWithoutPartition() { ConfigSource config = getConfigWithDefaultValues(); config.set("partition", false); config.set("use_compression_codec", true); runner.transaction(config, new Control()); assertRecords(config, output, 12); } @Test public 
void testHdfsFileInputByOpenWithoutCompressionCodec() { ConfigSource config = getConfigWithDefaultValues(); config.set("partition", false); config.set("path", getClass().getResource("/sample_01.csv").getPath()); runner.transaction(config, new Control()); assertRecords(config, output, 4); } @Test public void testStrftime() { ConfigSource config = getConfigWithDefaultValues(); config.set("path", "/tmp/%Y-%m-%d"); config.set("rewind_seconds", 86400); PluginTask task = config.loadConfig(PluginTask.class); String result = plugin.strftime(task, task.getPath(), task.getRewindSeconds()); String expected = task.getJRuby().runScriptlet("(Time.now - 86400).strftime('/tmp/%Y-%m-%d')").toString(); assertEquals(expected, result); } private class Control implements InputPlugin.Control { @Override public List<TaskReport> run(TaskSource taskSource, Schema schema, int taskCount) { List<TaskReport> reports = new ArrayList<>(); for (int i = 0; i < taskCount; i++) { reports.add(runner.run(taskSource, schema, i, output)); } return reports; } } private ConfigSource getConfigWithDefaultValues() { return Exec.newConfigSource() .set("path", path.toString()) .set("config", hdfsLocalFSConfig()) .set("skip_header_lines", 1) .set("parser", parserConfig(schemaConfig())); } static List<TaskReport> emptyTaskReports(int taskCount) { ImmutableList.Builder<TaskReport> reports = new ImmutableList.Builder<>(); for (int i = 0; i < taskCount; i++) { reports.add(Exec.newTaskReport()); } return reports.build(); } private ImmutableMap<String, Object> hdfsLocalFSConfig() { ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder(); builder.put("fs.hdfs.impl", "org.apache.hadoop.fs.LocalFileSystem"); builder.put("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem"); builder.put("fs.defaultFS", "file:///"); return builder.build(); } private ImmutableMap<String, Object> parserConfig(ImmutableList<Object> schemaConfig) { ImmutableMap.Builder<String, Object> builder = new ImmutableMap.Builder<>(); 
builder.put("type", "csv"); builder.put("newline", "CRLF"); builder.put("delimiter", ","); builder.put("quote", "\""); builder.put("escape", "\""); builder.put("trim_if_not_quoted", false); builder.put("skip_header_lines", 1); builder.put("allow_extra_columns", false); builder.put("allow_optional_columns", false); builder.put("columns", schemaConfig); return builder.build(); } private ImmutableList<Object> schemaConfig() { ImmutableList.Builder<Object> builder = new ImmutableList.Builder<>(); builder.add(ImmutableMap.of("name", "id", "type", "long")); builder.add(ImmutableMap.of("name", "account", "type", "long")); builder.add(ImmutableMap.of("name", "time", "type", "timestamp", "format", "%Y-%m-%d %H:%M:%S")); builder.add(ImmutableMap.of("name", "purchase", "type", "timestamp", "format", "%Y%m%d")); builder.add(ImmutableMap.of("name", "comment", "type", "string")); return builder.build(); } private void assertRecords(ConfigSource config, MockPageOutput output, long size) { List<Object[]> records = getRecords(config, output); assertEquals(size, records.size()); { Object[] record = records.get(0); assertEquals(1L, record[0]); assertEquals(32864L, record[1]); assertEquals("2015-01-27 19:23:49 UTC", record[2].toString()); assertEquals("2015-01-27 00:00:00 UTC", record[3].toString()); assertEquals("embulk", record[4]); } { Object[] record = records.get(1); assertEquals(2L, record[0]); assertEquals(14824L, record[1]); assertEquals("2015-01-27 19:01:23 UTC", record[2].toString()); assertEquals("2015-01-27 00:00:00 UTC", record[3].toString()); assertEquals("embulk jruby", record[4]); } } private List<Object[]> getRecords(ConfigSource config, MockPageOutput output) { Schema schema = config.getNested("parser").loadConfig(CsvParserPlugin.PluginTask.class).getSchemaConfig().toSchema(); return Pages.toObjects(schema, output.pages); } }
src/test/java/org/embulk/input/hdfs/TestHdfsFileInputPlugin.java
package org.embulk.input.hdfs; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.apache.hadoop.fs.Path; import org.embulk.EmbulkTestRuntime; import org.embulk.config.ConfigException; import org.embulk.config.ConfigSource; import org.embulk.config.TaskReport; import org.embulk.config.TaskSource; import org.embulk.input.hdfs.HdfsFileInputPlugin.PluginTask; import org.embulk.spi.Exec; import org.embulk.spi.FileInputPlugin; import org.embulk.spi.FileInputRunner; import org.embulk.spi.InputPlugin; import org.embulk.spi.Schema; import org.embulk.spi.TestPageBuilderReader.MockPageOutput; import org.embulk.spi.util.Pages; import org.embulk.standards.CsvParserPlugin; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.slf4j.Logger; import javax.annotation.Nullable; import java.io.File; import java.util.ArrayList; import java.util.List; import static org.junit.Assert.assertEquals; public class TestHdfsFileInputPlugin { @Rule public EmbulkTestRuntime runtime = new EmbulkTestRuntime(); @Rule public ExpectedException exception = ExpectedException.none(); private Logger logger = runtime.getExec().getLogger(TestHdfsFileInputPlugin.class); private HdfsFileInputPlugin plugin; private FileInputRunner runner; private MockPageOutput output; private Path path; @Before public void createResources() { plugin = new HdfsFileInputPlugin(); runner = new FileInputRunner(runtime.getInstance(HdfsFileInputPlugin.class)); output = new MockPageOutput(); path = new Path(new File(getClass().getResource("/sample_01.csv").getPath()).getParent()); } @Test public void testDefaultValues() { ConfigSource config = Exec.newConfigSource() .set("path", path.toString()); PluginTask task = config.loadConfig(PluginTask.class); 
assertEquals(path.toString(), task.getPath()); assertEquals(Lists.newArrayList(), task.getConfigFiles()); assertEquals(Maps.newHashMap(), task.getConfig()); assertEquals(true, task.getPartition()); assertEquals(0, task.getRewindSeconds()); assertEquals(-1, task.getApproximateNumPartitions()); assertEquals(0, task.getSkipHeaderLines()); assertEquals(false, task.getUseCompressionCodec()); } @Test(expected = ConfigException.class) public void testRequiredValues() { ConfigSource config = Exec.newConfigSource(); PluginTask task = config.loadConfig(PluginTask.class); } @Test public void testFileList() { ConfigSource config = getConfigWithDefaultValues(); config.set("num_partitions", 1); plugin.transaction(config, new FileInputPlugin.Control() { @Override public List<TaskReport> run(TaskSource taskSource, int taskCount) { PluginTask task = taskSource.loadTask(PluginTask.class); List<String> fileList = Lists.transform(Lists.newArrayList(new File(path.toString()).list()), new Function<String, String>() { @Nullable @Override public String apply(@Nullable String input) { return new File(path.toString() + "/" + input).toURI().toString(); } }); List<String> resultFList = Lists.transform(task.getFiles(), new Function<HdfsPartialFile, String>() { @Nullable @Override public String apply(@Nullable HdfsPartialFile input) { assert input != null; return input.getPath(); } }); assertEquals(fileList, resultFList); return emptyTaskReports(taskCount); } }); } @Test public void testHdfsFileInputByOpen() { ConfigSource config = getConfigWithDefaultValues(); config.set("num_partitions", 10); config.set("use_compression_codec", true); runner.transaction(config, new Control()); assertRecords(config, output, 12); } @Test public void testHdfsFileInputByOpenWithoutPartition() { ConfigSource config = getConfigWithDefaultValues(); config.set("partition", false); config.set("use_compression_codec", true); runner.transaction(config, new Control()); assertRecords(config, output, 12); } @Test public 
void testHdfsFileInputByOpenWithoutCompressionCodec() { ConfigSource config = getConfigWithDefaultValues(); config.set("partition", false); config.set("path", getClass().getResource("/sample_01.csv").getPath()); runner.transaction(config, new Control()); assertRecords(config, output, 4); } @Test public void testStrftime() { ConfigSource config = getConfigWithDefaultValues(); config.set("path", "/tmp/%Y-%m-%d"); config.set("rewind_seconds", 86400); PluginTask task = config.loadConfig(PluginTask.class); String result = plugin.strftime(task, task.getPath(), task.getRewindSeconds()); String expected = task.getJRuby().runScriptlet("(Time.now - 86400).strftime('/tmp/%Y-%m-%d')").toString(); assertEquals(expected, result); } private class Control implements InputPlugin.Control { @Override public List<TaskReport> run(TaskSource taskSource, Schema schema, int taskCount) { List<TaskReport> reports = new ArrayList<>(); for (int i = 0; i < taskCount; i++) { reports.add(runner.run(taskSource, schema, i, output)); } return reports; } } private ConfigSource getConfigWithDefaultValues() { return Exec.newConfigSource() .set("path", path.toString()) .set("config", hdfsLocalFSConfig()) .set("skip_header_lines", 1) .set("parser", parserConfig(schemaConfig())); } static List<TaskReport> emptyTaskReports(int taskCount) { ImmutableList.Builder<TaskReport> reports = new ImmutableList.Builder<>(); for (int i = 0; i < taskCount; i++) { reports.add(Exec.newTaskReport()); } return reports.build(); } private ImmutableMap<String, Object> hdfsLocalFSConfig() { ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder(); builder.put("fs.hdfs.impl", "org.apache.hadoop.fs.LocalFileSystem"); builder.put("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem"); builder.put("fs.defaultFS", "file:///"); return builder.build(); } private ImmutableMap<String, Object> parserConfig(ImmutableList<Object> schemaConfig) { ImmutableMap.Builder<String, Object> builder = new ImmutableMap.Builder<>(); 
builder.put("type", "csv"); builder.put("newline", "CRLF"); builder.put("delimiter", ","); builder.put("quote", "\""); builder.put("escape", "\""); builder.put("trim_if_not_quoted", false); builder.put("skip_header_lines", 1); builder.put("allow_extra_columns", false); builder.put("allow_optional_columns", false); builder.put("columns", schemaConfig); return builder.build(); } private ImmutableList<Object> schemaConfig() { ImmutableList.Builder<Object> builder = new ImmutableList.Builder<>(); builder.add(ImmutableMap.of("name", "id", "type", "long")); builder.add(ImmutableMap.of("name", "account", "type", "long")); builder.add(ImmutableMap.of("name", "time", "type", "timestamp", "format", "%Y-%m-%d %H:%M:%S")); builder.add(ImmutableMap.of("name", "purchase", "type", "timestamp", "format", "%Y%m%d")); builder.add(ImmutableMap.of("name", "comment", "type", "string")); return builder.build(); } private void assertRecords(ConfigSource config, MockPageOutput output, long size) { List<Object[]> records = getRecords(config, output); for (Object[] record : records) { for (Object c : record) { logger.info("{}", c); } } assertEquals(size, records.size()); { Object[] record = records.get(0); assertEquals(1L, record[0]); assertEquals(32864L, record[1]); assertEquals("2015-01-27 19:23:49 UTC", record[2].toString()); assertEquals("2015-01-27 00:00:00 UTC", record[3].toString()); assertEquals("embulk", record[4]); } { Object[] record = records.get(1); assertEquals(2L, record[0]); assertEquals(14824L, record[1]); assertEquals("2015-01-27 19:01:23 UTC", record[2].toString()); assertEquals("2015-01-27 00:00:00 UTC", record[3].toString()); assertEquals("embulk jruby", record[4]); } } private List<Object[]> getRecords(ConfigSource config, MockPageOutput output) { Schema schema = config.getNested("parser").loadConfig(CsvParserPlugin.PluginTask.class).getSchemaConfig().toSchema(); return Pages.toObjects(schema, output.pages); } }
Remove debug log
src/test/java/org/embulk/input/hdfs/TestHdfsFileInputPlugin.java
Remove debug log
Java
mit
8c9a593204301b9f1a08250ced10efb9bb13ce37
0
ReplayMod/MCProtocolLib,xDiP/MCProtocolLib,MCGamerNetwork/MCProtocolLib,Steveice10/MCProtocolLib,kukrimate/MCProtocolLib,Johni0702/MCProtocolLib,HexogenDev/MCProtocolLib
package ch.spacebase.mcprotocol.standard; import java.io.EOFException; import java.io.IOException; import java.net.Socket; import java.net.UnknownHostException; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import javax.crypto.SecretKey; import org.bouncycastle.crypto.BufferedBlockCipher; import org.bouncycastle.crypto.engines.AESFastEngine; import org.bouncycastle.crypto.io.CipherInputStream; import org.bouncycastle.crypto.io.CipherOutputStream; import org.bouncycastle.crypto.modes.CFBBlockCipher; import org.bouncycastle.crypto.params.KeyParameter; import org.bouncycastle.crypto.params.ParametersWithIV; import ch.spacebase.mcprotocol.event.DisconnectEvent; import ch.spacebase.mcprotocol.event.PacketRecieveEvent; import ch.spacebase.mcprotocol.event.PacketSendEvent; import ch.spacebase.mcprotocol.exception.ConnectException; import ch.spacebase.mcprotocol.net.BaseConnection; import ch.spacebase.mcprotocol.net.Client; import ch.spacebase.mcprotocol.net.Connection; import ch.spacebase.mcprotocol.net.Protocol; import ch.spacebase.mcprotocol.net.ServerConnection; import ch.spacebase.mcprotocol.packet.Packet; import ch.spacebase.mcprotocol.standard.io.StandardInput; import ch.spacebase.mcprotocol.standard.io.StandardOutput; import ch.spacebase.mcprotocol.standard.packet.PacketDisconnect; import ch.spacebase.mcprotocol.util.Util; /** * A connection implementing standard Minecraft protocol. */ public abstract class StandardConnection extends BaseConnection { /** * The connection's socket. */ private Socket sock; /** * The connection's input stream. */ private StandardInput input; /** * The connection's output stream. */ private StandardOutput output; /** * The connection's packet write queue. */ private Queue<Packet> packets = new ConcurrentLinkedQueue<Packet>(); /** * Whether the connection is reading. */ private boolean reading = false; /** * Whether the connection is writing. 
*/ private boolean writing = false; /** * Whether the connection is connected. */ private boolean connected = false; /** * The connection's secret key. */ private SecretKey key; /** * Creates a new standard connection. * @param host Host to connect to. * @param port Port to connect to. */ public StandardConnection(String host, int port) { super(host, port); } @Override public Protocol getType() { return Protocol.STANDARD; } @Override public boolean isConnected() { return this.connected; } /** * Connects using the given socket. * @param sock Socket to use. * @throws ConnectException If a connection error occurs. */ protected void connect(Socket sock) throws ConnectException { this.sock = sock; try { this.input = new StandardInput(this.sock.getInputStream()); this.output = new StandardOutput(this.sock.getOutputStream()); this.connected = true; new ListenThread().start(); new WriteThread().start(); } catch (UnknownHostException e) { throw new ConnectException("Unknown host: " + this.getRemoteHost()); } catch (IOException e) { throw new ConnectException("Failed to open stream: " + this.getRemoteHost(), e); } } @Override public void disconnect(String reason) { this.disconnect(reason, true); } @Override public void disconnect(String reason, boolean packet) { this.packets.clear(); if(packet) { this.send(new PacketDisconnect(reason)); } new CloseThread().start(); this.connected = false; this.call(new DisconnectEvent(this, reason)); } @Override public void send(Packet packet) { this.packets.add(packet); } /** * Gets the protocol's secret key. * @return The protocol's secret key. */ public SecretKey getSecretKey() { return this.key; } /** * Sets the protocol's secret key. * @param key The new secret key. */ public void setSecretKey(SecretKey key) { this.key = key; } /** * Enabled AES encryption on the connection. * @param conn Connection to enable AES on. 
*/ public void setAES(Connection conn) { BufferedBlockCipher in = new BufferedBlockCipher(new CFBBlockCipher(new AESFastEngine(), 8)); in.init(false, new ParametersWithIV(new KeyParameter(this.key.getEncoded()), this.key.getEncoded(), 0, 16)); BufferedBlockCipher out = new BufferedBlockCipher(new CFBBlockCipher(new AESFastEngine(), 8)); out.init(true, new ParametersWithIV(new KeyParameter(this.key.getEncoded()), this.key.getEncoded(), 0, 16)); this.input = new StandardInput(new CipherInputStream(this.input.getStream(), in)); this.output = new StandardOutput(new CipherOutputStream(this.output.getStream(), out)); } /** * A thread listening for incoming packets. */ private class ListenThread extends Thread { @Override public void run() { while(isConnected()) { try { reading = true; int opcode = input.readUnsignedByte(); if(opcode < 0) { continue; } if(getType().getPacket(opcode) == null) { Util.logger().severe("Bad packet ID: " + opcode); disconnect("Bad packet ID: " + opcode); return; } Packet packet = getType().getPacket(opcode).newInstance(); packet.read(input); call(new PacketRecieveEvent(packet)); if(StandardConnection.this instanceof Client) { packet.handleClient((Client) StandardConnection.this); } else if(StandardConnection.this instanceof ServerConnection) { packet.handleServer((ServerConnection) StandardConnection.this); } reading = false; } catch(EOFException e) { disconnect("End of Stream"); } catch (Exception e) { Util.logger().severe("Error while listening to connection!"); e.printStackTrace(); disconnect("Error while listening to connection!"); } try { Thread.sleep(2); } catch (InterruptedException e) { } reading = false; } } } /** * A thread writing outgoing packets. 
*/ private class WriteThread extends Thread { @Override public void run() { while(isConnected()) { if(packets.size() > 0) { writing = true; Packet packet = packets.poll(); call(new PacketSendEvent(packet)); try { output.writeByte(packet.getId()); packet.write(output); output.flush(); } catch (Exception e) { Util.logger().severe("Error while writing packet \"" + packet.getId() + "\"!"); e.printStackTrace(); disconnect("Error while writing packet."); } writing = false; } try { Thread.sleep(2); } catch (InterruptedException e) { } writing = false; } } } /** * A thread that waits for the connection to finish before closing it. */ private class CloseThread extends Thread { @Override public void run() { while(reading || writing) { try { Thread.sleep(2); } catch (InterruptedException e) { } } try { sock.close(); } catch (IOException e) { System.err.println("Failed to close socket."); e.printStackTrace(); } } } }
src/main/java/ch/spacebase/mcprotocol/standard/StandardConnection.java
package ch.spacebase.mcprotocol.standard; import java.io.EOFException; import java.io.IOException; import java.net.Socket; import java.net.UnknownHostException; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import javax.crypto.SecretKey; import org.bouncycastle.crypto.BufferedBlockCipher; import org.bouncycastle.crypto.engines.AESFastEngine; import org.bouncycastle.crypto.io.CipherInputStream; import org.bouncycastle.crypto.io.CipherOutputStream; import org.bouncycastle.crypto.modes.CFBBlockCipher; import org.bouncycastle.crypto.params.KeyParameter; import org.bouncycastle.crypto.params.ParametersWithIV; import ch.spacebase.mcprotocol.event.DisconnectEvent; import ch.spacebase.mcprotocol.event.PacketRecieveEvent; import ch.spacebase.mcprotocol.event.PacketSendEvent; import ch.spacebase.mcprotocol.exception.ConnectException; import ch.spacebase.mcprotocol.net.BaseConnection; import ch.spacebase.mcprotocol.net.Client; import ch.spacebase.mcprotocol.net.Connection; import ch.spacebase.mcprotocol.net.Protocol; import ch.spacebase.mcprotocol.net.ServerConnection; import ch.spacebase.mcprotocol.packet.Packet; import ch.spacebase.mcprotocol.standard.io.StandardInput; import ch.spacebase.mcprotocol.standard.io.StandardOutput; import ch.spacebase.mcprotocol.standard.packet.PacketDisconnect; import ch.spacebase.mcprotocol.util.Util; /** * A connection implementing standard Minecraft protocol. */ public abstract class StandardConnection extends BaseConnection { /** * The connection's socket. */ private Socket sock; /** * The connection's input stream. */ private StandardInput input; /** * The connection's output stream. */ private StandardOutput output; /** * The connection's packet write queue. */ private Queue<Packet> packets = new ConcurrentLinkedQueue<Packet>(); /** * Whether the connection is reading. */ private boolean reading = false; /** * Whether the connection is writing. 
*/ private boolean writing = false; /** * Whether the connection is connected. */ private boolean connected = false; /** * The connection's secret key. */ private SecretKey key; /** * Creates a new standard connection. * @param host Host to connect to. * @param port Port to connect to. */ public StandardConnection(String host, int port) { super(host, port); } @Override public Protocol getType() { return Protocol.STANDARD; } @Override public boolean isConnected() { return this.connected; } /** * Connects using the given socket. * @param sock Socket to use. * @throws ConnectException If a connection error occurs. */ protected void connect(Socket sock) throws ConnectException { this.sock = sock; try { this.input = new StandardInput(this.sock.getInputStream()); this.output = new StandardOutput(this.sock.getOutputStream()); this.connected = true; new ListenThread().start(); new WriteThread().start(); } catch (UnknownHostException e) { throw new ConnectException("Unknown host: " + this.getRemoteHost()); } catch (IOException e) { throw new ConnectException("Failed to open stream: " + this.getRemoteHost(), e); } } @Override public void disconnect(String reason) { this.disconnect(reason, true); } @Override public void disconnect(String reason, boolean packet) { if(packet) { this.send(new PacketDisconnect(reason)); } new CloseThread().start(); this.connected = false; this.call(new DisconnectEvent(this, reason)); } @Override public void send(Packet packet) { this.packets.add(packet); } /** * Gets the protocol's secret key. * @return The protocol's secret key. */ public SecretKey getSecretKey() { return this.key; } /** * Sets the protocol's secret key. * @param key The new secret key. */ public void setSecretKey(SecretKey key) { this.key = key; } /** * Enabled AES encryption on the connection. * @param conn Connection to enable AES on. 
*/ public void setAES(Connection conn) { BufferedBlockCipher in = new BufferedBlockCipher(new CFBBlockCipher(new AESFastEngine(), 8)); in.init(false, new ParametersWithIV(new KeyParameter(this.key.getEncoded()), this.key.getEncoded(), 0, 16)); BufferedBlockCipher out = new BufferedBlockCipher(new CFBBlockCipher(new AESFastEngine(), 8)); out.init(true, new ParametersWithIV(new KeyParameter(this.key.getEncoded()), this.key.getEncoded(), 0, 16)); this.input = new StandardInput(new CipherInputStream(this.input.getStream(), in)); this.output = new StandardOutput(new CipherOutputStream(this.output.getStream(), out)); } /** * A thread listening for incoming packets. */ private class ListenThread extends Thread { @Override public void run() { while(isConnected()) { try { reading = true; int opcode = input.readUnsignedByte(); if(opcode < 0) { continue; } if(getType().getPacket(opcode) == null) { Util.logger().severe("Bad packet ID: " + opcode); disconnect("Bad packet ID: " + opcode); return; } Packet packet = getType().getPacket(opcode).newInstance(); packet.read(input); call(new PacketRecieveEvent(packet)); if(StandardConnection.this instanceof Client) { packet.handleClient((Client) StandardConnection.this); } else if(StandardConnection.this instanceof ServerConnection) { packet.handleServer((ServerConnection) StandardConnection.this); } reading = false; } catch(EOFException e) { disconnect("End of Stream"); } catch (Exception e) { Util.logger().severe("Error while listening to connection!"); e.printStackTrace(); disconnect("Error while listening to connection!"); } try { Thread.sleep(2); } catch (InterruptedException e) { } } } } /** * A thread writing outgoing packets. 
*/ private class WriteThread extends Thread { @Override public void run() { while(isConnected()) { if(packets.size() > 0) { writing = true; Packet packet = packets.poll(); call(new PacketSendEvent(packet)); try { output.writeByte(packet.getId()); packet.write(output); output.flush(); } catch (Exception e) { Util.logger().severe("Error while writing packet \"" + packet.getId() + "\"!"); e.printStackTrace(); disconnect("Error while writing packet."); } writing = false; } try { Thread.sleep(2); } catch (InterruptedException e) { } } } } /** * A thread that waits for the connection to finish before closing it. */ private class CloseThread extends Thread { @Override public void run() { while(reading || writing) { try { Thread.sleep(2); } catch (InterruptedException e) { } } try { sock.close(); } catch (IOException e) { System.err.println("Failed to close socket."); e.printStackTrace(); } } } }
Clear packet queue on disconnect. Also, make sure writing/reading are set to false on error
src/main/java/ch/spacebase/mcprotocol/standard/StandardConnection.java
Clear packet queue on disconnect. Also, make sure writing/reading are set to false on error
Java
mit
0db3a4b161c71c67fa475ec08e820bf1c3d0cd02
0
kreneskyp/openconferenceware-android
package org.osb; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.*; import android.app.AlertDialog; import android.app.Dialog; import android.app.ProgressDialog; import android.content.Context; import android.content.Intent; import android.content.res.Resources; import android.graphics.Color; import android.net.Uri; import android.os.Bundle; import android.os.Handler; import android.text.Html; import android.text.method.LinkMovementMethod; import android.view.KeyEvent; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.SubMenu; import android.view.View; import android.view.ViewGroup; import android.view.View.OnClickListener; import android.view.animation.Animation; import android.view.animation.AnimationUtils; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ListView; import android.widget.ScrollView; import android.widget.TextView; import android.widget.ViewFlipper; public class ScheduleActivity extends AbstractActivity { private static final int MENU_DATE_BASE = 1000; private static final int MENU_NEXT = 5; private static final int MENU_PREV = 6; private static final int MENU_ABOUT = 7; private static final int MENU_NOW = 8; private static final int MENU_REFRESH = 9; private static final int DIALOG_ABOUT = 1; private static final int DIALOG_LOADING = 2; // state Date mCurrentDate; Date mLoadDate; TextView mDate; boolean mDetail = false; Handler mHandler; // general conference data Conference mConference; Date[] mDates; HashMap<Date, Schedule> mSchedule; // session list EventAdapter mAdapter; ListView mEvents; // screen animation ViewFlipper mFlipper; Animation mInLeft; Animation mInRight; Animation mOutLeft; Animation mOutRight; // session details Event mEvent = null; HashMap<Integer, Speaker> mSpeakers; View mHeader; TextView 
mTitle; TextView mTime; TextView mLocation; View mTimeLocation; TextView mSpeaker; ScrollView mDescriptionScroller; TextView mDescription; ImageView mMapImage; LinearLayout mBio; // session detail actions Button mFoursquare; Button mShare; Button mMap; Button mShowDescription; Button mShowBio; /** Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); mHandler = new Handler(); mSpeakers = new HashMap<Integer, Speaker>(); mSchedule = new HashMap<Date, Schedule>(); mDate = (TextView) findViewById(R.id.date); mEvents = (ListView) findViewById(R.id.events); mFlipper = (ViewFlipper) findViewById(R.id.flipper); Context context = getApplicationContext(); mInLeft = AnimationUtils.loadAnimation(context, R.anim.slide_in_left); mInRight = AnimationUtils.loadAnimation(context, R.anim.slide_in_right); mOutLeft = AnimationUtils.loadAnimation(context, R.anim.slide_out_left); mOutRight = AnimationUtils.loadAnimation(context, R.anim.slide_out_right); // grab views for details View detail = findViewById(R.id.detail); mHeader = findViewById(R.id.detail_header); mSpeaker = (TextView) findViewById(R.id.speaker); mTitle = (TextView) detail.findViewById(R.id.title); mTimeLocation = detail.findViewById(R.id.time_location); mTime = (TextView) detail.findViewById(R.id.time); mLocation = (TextView) detail.findViewById(R.id.location); mDescription = (TextView) detail.findViewById(R.id.description); mDescriptionScroller = (ScrollView) detail.findViewById(R.id.description_scroller); mMapImage = (ImageView) detail.findViewById(R.id.map_image); mBio = (LinearLayout) detail.findViewById(R.id.bio); // detail action buttons mFoursquare = (Button) findViewById(R.id.foursquare); mShare = (Button) findViewById(R.id.share); mMap = (Button) findViewById(R.id.map); mShowDescription = (Button) findViewById(R.id.show_description); mShowBio = (Button) findViewById(R.id.show_bio); 
mEvents.setOnItemClickListener(new ListView.OnItemClickListener() { public void onItemClick(AdapterView<?> adapterview, View view, int position, long id) { Object item = mAdapter.mFiltered.get(position); if (item instanceof Date) { return;// ignore clicks on the dates } mEvent = loadEvent((Event) item, false); loadDescriptionView(); mFlipper.setInAnimation(mInRight); mFlipper.setOutAnimation(mOutLeft); mFlipper.showNext(); mDetail = true; } }); mShowDescription.setOnClickListener(new OnClickListener() { public void onClick(View v) { show_description(); } }); mMap.setOnClickListener(new OnClickListener() { public void onClick(View v) { int id = getResources().getIdentifier("map_"+mEvent.location,"drawable",getPackageName()); mDescription.setVisibility(View.GONE); mBio.setVisibility(View.GONE); // only set&show image if a map image was found if (id!=0){ mMapImage.setImageResource(id); mMapImage.setVisibility(View.VISIBLE); } } }); mShowBio.setOnClickListener(new OnClickListener() { public void onClick(View v) { mBio.removeAllViews(); Integer[] speaker_ids = mEvent.speaker_ids; if (speaker_ids != null) { for (int i=0; i<speaker_ids.length; i++) { View view = loadBioView(speaker_ids[i]); if (view != null) { if (i>0){ view.setPadding(0, 30, 0, 0); } mBio.addView(view); } } mDescription.setVisibility(View.GONE); mMapImage.setVisibility(View.GONE); mBio.setVisibility(View.VISIBLE); } } /** * loads a view populated with the speakers info * @param id * @return */ private View loadBioView(int sid) { Integer id = new Integer(sid); Speaker speaker = null; View view = null; // check memory to see if speaker had already been loaded // else load the speaker from persistent storage if (mSpeakers.containsKey(id)){ speaker = mSpeakers.get(id); } else { speaker = getDataService().getSpeaker(id, false); mSpeakers.put(id, speaker); } // create view if (speaker != null) { LayoutInflater vi = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE); view = 
vi.inflate(R.layout.bio, null); TextView name = (TextView) view.findViewById(R.id.name); name.setText(speaker.name); TextView biography = (TextView) view.findViewById(R.id.biography); biography.setMovementMethod(LinkMovementMethod.getInstance()); biography.setText(Html.fromHtml(speaker.biography)); String twitter = speaker.twitter; if (twitter != null && twitter != "" && twitter != "null"){ TextView text = (TextView) view.findViewById(R.id.twitter); text.setText(twitter); View parent = (View) text.getParent(); parent.setVisibility(View.VISIBLE); } String website = speaker.website; if (website != null && website != "" && website != "null"){ TextView text = (TextView) view.findViewById(R.id.website); text.setText(speaker.website); View parent = (View) text.getParent(); parent.setVisibility(View.VISIBLE); } String blog = speaker.blog; if (blog != null && blog != "" && blog != "null"){ TextView text = (TextView) view.findViewById(R.id.blog); text.setText(speaker.blog); View parent = (View) text.getParent(); parent.setVisibility(View.VISIBLE); } if (speaker.affiliation != null){ TextView text = (TextView) view.findViewById(R.id.affiliation); text.setText(speaker.affiliation); } String identica = speaker.identica; if (identica != null && identica != "" && identica != "null"){ TextView text = (TextView) view.findViewById(R.id.identica); text.setText(speaker.identica); View parent = (View) text.getParent(); parent.setVisibility(View.VISIBLE); } } return view; } }); mFoursquare.setOnClickListener(new OnClickListener() { public void onClick(View v) { String url = mapRoomNameToFqUrl((mLocation).getText().toString()); Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url)); startActivity(intent); } private String mapRoomNameToFqUrl(String roomName) { String vid = ""; if (roomName.equals("Hawthorne")) { vid = "4281683"; } else if (roomName.equals("Burnside")) { vid = "4281826"; } else if (roomName.equals("St. 
Johns")) { vid = "4281970"; } else if (roomName.equals("Broadway")) { vid = "4281777"; } else if (roomName.equals("Morrison")) { vid = "4281923"; } else if (roomName.equals("Fremont")) { vid = "4281874"; } else if (roomName.equals("Steel")) { vid = "4282004"; } return "http://m.foursquare.com/checkin?vid="+vid; } }); mShare.setOnClickListener(new OnClickListener() { public void onClick(View v) { Intent intent = new Intent(android.content.Intent.ACTION_SEND); intent.setType("text/plain"); Resources r = getApplicationContext().getResources(); intent.putExtra(Intent.EXTRA_SUBJECT, r.getString(R.string.share_subject)); intent.putExtra(Intent.EXTRA_TEXT, r.getString(R.string.share_text) + mTitle.getText() + r.getString(R.string.share_text2)); startActivity(Intent.createChooser(intent, "Share")); } }); mAdapter = new EventAdapter(this, R.layout.listevent); mEvents.setAdapter(mAdapter); // spawn loading into separate thread loadSchedule(false); now(); } /** * Shows the session description, hides all other subviews */ private void show_description(){ mMapImage.setVisibility(View.GONE); mBio.setVisibility(View.GONE); mDescription.setVisibility(View.VISIBLE); } private void loadDescriptionView(){ Event event = mEvent; Track track = mConference.tracks.get(event.track); Location location = mConference.locations.get(event.location); // create list of speakers from all speaker objects String speaker_names = ""; Speaker speaker; for(Integer sid: event.speaker_ids){ if (mSpeakers.containsKey(sid)){ speaker = mSpeakers.get(sid); } else { speaker = getDataService().getSpeaker(sid, false); mSpeakers.put(sid, speaker); } if (speaker_names == "") { speaker_names = speaker.name; } else { speaker_names = speaker_names + ", " + speaker.name; } } mSpeaker.setText(speaker_names); mHeader.setBackgroundColor(Color.parseColor(track.color)); mTitle.setText(event.title); mTitle.setTextColor(Color.parseColor(track.color_text)); mLocation.setText(location.name); DateFormat startFormat = new 
SimpleDateFormat("E, h:mm"); DateFormat endFormat = new SimpleDateFormat("h:mm a"); String timeString = startFormat.format(event.start) + " - " + endFormat.format(event.end); mTime.setText(timeString); mTimeLocation.setBackgroundColor(Color.parseColor(track.color_dark)); mDescription.setMovementMethod(LinkMovementMethod.getInstance()); mDescription.setText(Html.fromHtml(event.description)); show_description(); mDescriptionScroller.scrollTo(0, 0); } /** * overridden to hook back button when on the detail page */ public boolean onKeyDown(int keyCode, KeyEvent event){ if (mDetail && keyCode == KeyEvent.KEYCODE_BACK){ showList(); return true; } return super.onKeyDown(keyCode, event); } /* Creates the menu items */ public boolean onCreateOptionsMenu(Menu menu) { menu.add(0, MENU_PREV, 0, "Previous Day").setIcon(R.drawable.ic_menu_back); SubMenu dayMenu = menu.addSubMenu("Day").setIcon(android.R.drawable.ic_menu_today); DateFormat formatter = new SimpleDateFormat("EEEE, MMMM d"); Date date; for (int i=0; i<mDates.length; i++){ date = mDates[i]; dayMenu.add(0, MENU_DATE_BASE+i, 0, formatter.format(date)); } menu.add(0, MENU_NEXT, 0, "Next Day").setIcon(R.drawable.ic_menu_forward); menu.add(0, MENU_NOW, 0, "Now").setIcon(R.drawable.time); menu.add(0, MENU_REFRESH, 0, "Refresh").setIcon(R.drawable.ic_menu_refresh); menu.add(0, MENU_ABOUT, 0, "About").setIcon(android.R.drawable.ic_menu_info_details); return true; } /* Handles item selections */ public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); switch (id) { case MENU_NOW: now(); return true; case MENU_PREV: previous(); return true; case MENU_NEXT: next(); return true; case MENU_ABOUT: showDialog(DIALOG_ABOUT); return true; case MENU_REFRESH: if (mDetail) { mEvent = loadEvent(mEvent, true); loadDescriptionView(); } else { new SetDayThread(mCurrentDate, true).start(); } return true; default: if (id >= MENU_DATE_BASE) { // must be a date menu option. 
all dates // menu options are an index offset by MENU_DATE_BASE //closeOptionsMenu(); new SetDayThread(mDates[item.getItemId()-MENU_DATE_BASE]).start(); return true; } } return false; } public void setDay(Date date){ setDay(date, false); } /* sets the current day, filtering the list if need be */ public void setDay(Date date, boolean force) { if (isSameDay(mCurrentDate, date) && !force) { // same day, just jump to current time mHandler.post(new Runnable(){ public void run(){ mAdapter.now(mCurrentDate); } }); } else { // different day, update the list. Load the date requested // if it is not already loaded mCurrentDate = date; mAdapter.filterDay(date, force); } // take user back to the listings if not already there showList(); } /** * Jumps the user to right now in the event list: * * - if its before or after the conference, it shows the beginning * of day 1 * - if its during the conference it will show the first event * currently underway */ public void now(){ // use now, since it will have the time of day for // jumping to the right time Date now = new Date(); if (now.before(mDates[0]) || now.after(mConference.end)) { now = (Date) mDates[0].clone(); } new SetDayThread(now).start(); } /** * Jumps to the next day, if not already at the end */ public void next() { if (!isSameDay(mCurrentDate, mConference.end)) { Date load = new Date(mCurrentDate.getYear(), mCurrentDate.getMonth(), mCurrentDate.getDate()+1); new SetDayThread(load).start(); } } /** * Jumps to the previous day if now already at the beginning */ public void previous() { if (!isSameDay(mCurrentDate, mConference.start)) { Date load = new Date(mCurrentDate.getYear(), mCurrentDate.getMonth(), mCurrentDate.getDate()-1); new SetDayThread(load).start(); } } /** * Shows the event listing */ public void showList() { if (mDetail) { mFlipper.setInAnimation(mInLeft); mFlipper.setOutAnimation(mOutRight); mFlipper.showPrevious(); mDetail=false; } } /** * Loads the osbridge schedule * @param force - force reload */ 
private void loadSchedule(boolean force) { //XXX set date to a day that is definitely, not now. // This will cause it to update the list immediately. mCurrentDate = new Date(1900, 0, 0); DataService service = getDataService(); mConference = service.getConference(force); mDates = mConference.getDates(); } /** * Loads detailed info for an event. This should only be called * for events from the currently loaded day. * * @param event - partially loaded event from schedule object * @param force - force reload of data. */ private Event loadEvent(Event partialEvent, boolean force) { DataService service = getDataService(); Event event; // load detailed info if needed // update both the lists stored in the adapter if (!partialEvent.details || force){ event = service.getEvent(partialEvent.id, force); mAdapter.mFiltered.set(mAdapter.mFiltered.indexOf(partialEvent), event); mAdapter.mItems.set(mAdapter.mItems.indexOf(partialEvent), event); } else { event = partialEvent; } // preload the speakers for this event Speaker speaker; for(Integer sid: event.speaker_ids){ if (mSpeakers.containsKey(sid) && !force){ speaker = mSpeakers.get(sid); } else { speaker = getDataService().getSpeaker(sid, force); mSpeakers.put(sid, speaker); } } return event; } protected Dialog onCreateDialog(int id){ Context context = getApplicationContext(); switch (id) { case DIALOG_ABOUT: LayoutInflater inflater = (LayoutInflater) context.getSystemService(LAYOUT_INFLATER_SERVICE); View view = inflater.inflate(R.layout.about, null); AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle("About"); builder.setCancelable(true); builder.setView(view); builder.setIcon(android.R.drawable.ic_dialog_info); return builder.create(); case DIALOG_LOADING: ProgressDialog progressDialog; progressDialog = new ProgressDialog(this); progressDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER); progressDialog.setMessage("Loading..."); progressDialog.setCancelable(true); return progressDialog; default: 
return null; } } /** * EventAdapter used for displaying a list of events * */ private class EventAdapter extends ArrayAdapter<Event> { private List<Event> mItems; private List<Object> mFiltered; public EventAdapter(Context context, int textViewResourceId) { super(context, textViewResourceId); mItems = new ArrayList<Event>(); mFiltered = new ArrayList<Object>(); } public EventAdapter(Context context, int textViewResourceId, List<Event> items) { super(context, textViewResourceId, items); mItems = items; mFiltered = new ArrayList<Object>(); } /** * Sets elements to the current schedule. This will use * cached data if already loaded. Else it will load it from * the dataservice * @param date - date to filter by */ public void filterDay(Date date, boolean force){ // Load the data for the requested day, load it from dataservice if needed // construct a new date with just year,month,day since keys only have that set // XXX adjust for timezone by setting time to noon Date load = new Date(date.getYear(), date.getMonth(), date.getDate(), 12, 0); if (mSchedule.containsKey(load) && !force){ mItems = mSchedule.get(load).events; } else { mHandler.post(new Runnable(){ public void run(){showDialog(DIALOG_LOADING);} }); DataService service = getDataService(); Schedule schedule = service.getSchedule(load, force); mSchedule.put(load, schedule); mItems = schedule.events; } List<Event> items = mItems; List<Object> filtered = new ArrayList<Object>(); int size = mItems.size(); Date currentStart = null; for (int i=0; i<size; i++){ Event event = items.get(i); if(currentStart == null || event.start.after(currentStart)) { currentStart = event.start; filtered.add(currentStart); } filtered.add(event); } mFiltered = filtered; mLoadDate = date; mHandler.post(new Runnable(){ public void run(){ DateFormat formatter = new SimpleDateFormat("E, MMMM d"); mDate.setText(formatter.format(mCurrentDate)); notifyDataSetChanged(); now(mLoadDate); removeDialog(DIALOG_LOADING); } }); } /** * sets the position 
to the current time * @param date */ public void now(Date date) { List<Object> filtered = mFiltered; int size = filtered.size(); for (int i=0; i<size; i++){ Object item = filtered.get(i); // find either the first session that hasn't ended yet // or the first time marker that hasn't occured yet. if (item instanceof Date ){ Date slot = (Date) item; if (date.before(slot)) { mEvents.setSelection(i); return; } } else { Event event = (Event) item; if (event.end.after(date)) { // should display the time marker instead of the // session mEvents.setSelection(i-1); return; } } } // no current event was found, jump to the next day next(); } public int getCount(){ return mFiltered.size(); } /** * Renders an item in the schedule list */ public View getView(int position, View convertView, ViewGroup parent) { View v = convertView; LayoutInflater vi = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE); Object item = mFiltered.get(position); if (item instanceof Date) { Date date = (Date)item; v = vi.inflate(R.layout.list_slot, null); TextView time = (TextView) v.findViewById(R.id.time); DateFormat formatter = new SimpleDateFormat("h:mm a"); time.setText(formatter.format(date)); } else { Event e = (Event) item; v = vi.inflate(R.layout.listevent, null); if (e != null) { TextView title = (TextView) v.findViewById(R.id.title); TextView locationView = (TextView) v.findViewById(R.id.location); TextView time = (TextView) v.findViewById(R.id.time); if (title != null) { title.setText(e.title); } if (e.location != -1) { Location location = mConference.locations.get(e.location); locationView.setText(location.name); } if (time != null) { DateFormat formatter = new SimpleDateFormat("h:mm"); time.setText(formatter.format(e.start) + "-" + formatter.format(e.end)); } if (e.track != -1) { TextView track_view = (TextView) v.findViewById(R.id.track); Track track = mConference.tracks.get(e.track); track_view.setTextColor(Color.parseColor(track.color)); track_view.setText(track.name); } 
} } return v; } } /** * Checks if two dates are the same day * @param date1 * @param date2 * @return */ public static boolean isSameDay(Date date1, Date date2) { if (date1 == null || date2 == null) { throw new IllegalArgumentException("The date must not be null"); } Calendar cal1 = Calendar.getInstance(); cal1.setTime(date1); Calendar cal2 = Calendar.getInstance(); cal2.setTime(date2); return isSameDay(cal1, cal2); } /** * Checks if two calendars are the same day * @param cal1 * @param cal2 * @return */ public static boolean isSameDay(Calendar cal1, Calendar cal2) { if (cal1 == null || cal2 == null) { throw new IllegalArgumentException("The date must not be null"); } return (cal1.get(Calendar.ERA) == cal2.get(Calendar.ERA) && cal1.get(Calendar.YEAR) == cal2.get(Calendar.YEAR) && cal1.get(Calendar.DAY_OF_YEAR) == cal2.get(Calendar.DAY_OF_YEAR)); } /** * thread for setting schedule day. threaded so dialogs * can return immediately. */ class SetDayThread extends Thread { Date date; boolean reload; public SetDayThread(Date date) { this.date = date; this.reload = false; } public SetDayThread(Date date, boolean reload) { this.date = date; this.reload = reload; } public void run(){ try{ if (reload){ mHandler.post(new Runnable(){ public void run(){showDialog(DIALOG_LOADING);} }); // always reload the conference object when reloading loadSchedule(true); } setDay(date, reload); } catch (Exception e){ e.printStackTrace(); } } } }
src/org/osb/ScheduleActivity.java
package org.osb; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.*; import android.app.AlertDialog; import android.app.Dialog; import android.app.ProgressDialog; import android.content.Context; import android.content.Intent; import android.content.res.Resources; import android.graphics.Color; import android.net.Uri; import android.os.Bundle; import android.os.Handler; import android.text.Html; import android.text.method.LinkMovementMethod; import android.view.KeyEvent; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.SubMenu; import android.view.View; import android.view.ViewGroup; import android.view.View.OnClickListener; import android.view.animation.Animation; import android.view.animation.AnimationUtils; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ListView; import android.widget.ScrollView; import android.widget.TextView; import android.widget.ViewFlipper; public class ScheduleActivity extends AbstractActivity { private static final int MENU_DATE_BASE = 1000; private static final int MENU_NEXT = 5; private static final int MENU_PREV = 6; private static final int MENU_ABOUT = 7; private static final int MENU_NOW = 8; private static final int MENU_REFRESH = 9; private static final int DIALOG_ABOUT = 1; private static final int DIALOG_LOADING = 2; // state Date mCurrentDate; Date mLoadDate; TextView mDate; boolean mDetail = false; Handler mHandler; // general conference data Conference mConference; Date[] mDates; HashMap<Date, Schedule> mSchedule; // session list EventAdapter mAdapter; ListView mEvents; // screen animation ViewFlipper mFlipper; Animation mInLeft; Animation mInRight; Animation mOutLeft; Animation mOutRight; // session details Event mEvent = null; HashMap<Integer, Speaker> mSpeakers; View mHeader; TextView 
mTitle; TextView mTime; TextView mLocation; View mTimeLocation; TextView mSpeaker; ScrollView mDescriptionScroller; TextView mDescription; ImageView mMapImage; LinearLayout mBio; // session detail actions Button mFoursquare; Button mShare; Button mMap; Button mShowDescription; Button mShowBio; /** Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); mHandler = new Handler(); mSpeakers = new HashMap<Integer, Speaker>(); mSchedule = new HashMap<Date, Schedule>(); mDate = (TextView) findViewById(R.id.date); mEvents = (ListView) findViewById(R.id.events); mFlipper = (ViewFlipper) findViewById(R.id.flipper); Context context = getApplicationContext(); mInLeft = AnimationUtils.loadAnimation(context, R.anim.slide_in_left); mInRight = AnimationUtils.loadAnimation(context, R.anim.slide_in_right); mOutLeft = AnimationUtils.loadAnimation(context, R.anim.slide_out_left); mOutRight = AnimationUtils.loadAnimation(context, R.anim.slide_out_right); // grab views for details View detail = findViewById(R.id.detail); mHeader = findViewById(R.id.detail_header); mSpeaker = (TextView) findViewById(R.id.speaker); mTitle = (TextView) detail.findViewById(R.id.title); mTimeLocation = detail.findViewById(R.id.time_location); mTime = (TextView) detail.findViewById(R.id.time); mLocation = (TextView) detail.findViewById(R.id.location); mDescription = (TextView) detail.findViewById(R.id.description); mDescriptionScroller = (ScrollView) detail.findViewById(R.id.description_scroller); mMapImage = (ImageView) detail.findViewById(R.id.map_image); mBio = (LinearLayout) detail.findViewById(R.id.bio); // detail action buttons mFoursquare = (Button) findViewById(R.id.foursquare); mShare = (Button) findViewById(R.id.share); mMap = (Button) findViewById(R.id.map); mShowDescription = (Button) findViewById(R.id.show_description); mShowBio = (Button) findViewById(R.id.show_bio); 
mEvents.setOnItemClickListener(new ListView.OnItemClickListener() { public void onItemClick(AdapterView<?> adapterview, View view, int position, long id) { Object item = mAdapter.mFiltered.get(position); if (item instanceof Date) { return;// ignore clicks on the dates } mEvent = loadEvent((Event) item, false); loadDescriptionView(); mFlipper.setInAnimation(mInRight); mFlipper.setOutAnimation(mOutLeft); mFlipper.showNext(); mDetail = true; } }); mShowDescription.setOnClickListener(new OnClickListener() { public void onClick(View v) { show_description(); } }); mMap.setOnClickListener(new OnClickListener() { public void onClick(View v) { int id = getResources().getIdentifier("map_"+mEvent.location,"drawable",getPackageName()); mDescription.setVisibility(View.GONE); mBio.setVisibility(View.GONE); // only set&show image if a map image was found if (id!=0){ mMapImage.setImageResource(id); mMapImage.setVisibility(View.VISIBLE); } } }); mShowBio.setOnClickListener(new OnClickListener() { public void onClick(View v) { mBio.removeAllViews(); Integer[] speaker_ids = mEvent.speaker_ids; if (speaker_ids != null) { for (int i=0; i<speaker_ids.length; i++) { View view = loadBioView(speaker_ids[i]); if (view != null) { if (i>0){ view.setPadding(0, 30, 0, 0); } mBio.addView(view); } } mDescription.setVisibility(View.GONE); mMapImage.setVisibility(View.GONE); mBio.setVisibility(View.VISIBLE); } } /** * loads a view populated with the speakers info * @param id * @return */ private View loadBioView(int sid) { Integer id = new Integer(sid); Speaker speaker = null; View view = null; // check memory to see if speaker had already been loaded // else load the speaker from persistent storage if (mSpeakers.containsKey(id)){ speaker = mSpeakers.get(id); } else { speaker = getDataService().getSpeaker(id, false); mSpeakers.put(id, speaker); } // create view if (speaker != null) { LayoutInflater vi = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE); view = 
vi.inflate(R.layout.bio, null); TextView name = (TextView) view.findViewById(R.id.name); name.setText(speaker.name); TextView biography = (TextView) view.findViewById(R.id.biography); biography.setMovementMethod(LinkMovementMethod.getInstance()); biography.setText(Html.fromHtml(speaker.biography)); String twitter = speaker.twitter; if (twitter != null && twitter != "" && twitter != "null"){ TextView text = (TextView) view.findViewById(R.id.twitter); text.setText(twitter); View parent = (View) text.getParent(); parent.setVisibility(View.VISIBLE); } String website = speaker.website; if (website != null && website != "" && website != "null"){ TextView text = (TextView) view.findViewById(R.id.website); text.setText(speaker.website); View parent = (View) text.getParent(); parent.setVisibility(View.VISIBLE); } String blog = speaker.blog; if (blog != null && blog != "" && blog != "null"){ TextView text = (TextView) view.findViewById(R.id.blog); text.setText(speaker.blog); View parent = (View) text.getParent(); parent.setVisibility(View.VISIBLE); } if (speaker.affiliation != null){ TextView text = (TextView) view.findViewById(R.id.affiliation); text.setText(speaker.affiliation); } String identica = speaker.identica; if (identica != null && identica != "" && identica != "null"){ TextView text = (TextView) view.findViewById(R.id.identica); text.setText(speaker.identica); View parent = (View) text.getParent(); parent.setVisibility(View.VISIBLE); } } return view; } }); mFoursquare.setOnClickListener(new OnClickListener() { public void onClick(View v) { String url = mapRoomNameToFqUrl((mLocation).getText().toString()); Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url)); startActivity(intent); } private String mapRoomNameToFqUrl(String roomName) { String vid = ""; if (roomName.equals("Hawthorne")) { vid = "4281683"; } else if (roomName.equals("Burnside")) { vid = "4281826"; } else if (roomName.equals("St. 
Johns")) { vid = "4281970"; } else if (roomName.equals("Broadway")) { vid = "4281777"; } else if (roomName.equals("Morrison")) { vid = "4281923"; } else if (roomName.equals("Fremont")) { vid = "4281874"; } else if (roomName.equals("Steel")) { vid = "4282004"; } return "http://m.foursquare.com/checkin?vid="+vid; } }); mShare.setOnClickListener(new OnClickListener() { public void onClick(View v) { Intent intent = new Intent(android.content.Intent.ACTION_SEND); intent.setType("text/plain"); Resources r = getApplicationContext().getResources(); intent.putExtra(Intent.EXTRA_SUBJECT, r.getString(R.string.share_subject)); intent.putExtra(Intent.EXTRA_TEXT, r.getString(R.string.share_text) + mTitle.getText() + r.getString(R.string.share_text2)); startActivity(Intent.createChooser(intent, "Share")); } }); mAdapter = new EventAdapter(this, R.layout.listevent); mEvents.setAdapter(mAdapter); // spawn loading into separate thread loadSchedule(false); now(); } /** * Shows the session description, hides all other subviews */ private void show_description(){ mMapImage.setVisibility(View.GONE); mBio.setVisibility(View.GONE); mDescription.setVisibility(View.VISIBLE); } private void loadDescriptionView(){ Event event = mEvent; Track track = mConference.tracks.get(event.track); Location location = mConference.locations.get(event.location); // create list of speakers from all speaker objects String speaker_names = ""; Speaker speaker; for(Integer sid: event.speaker_ids){ if (mSpeakers.containsKey(sid)){ speaker = mSpeakers.get(sid); } else { speaker = getDataService().getSpeaker(sid, false); mSpeakers.put(sid, speaker); } if (speaker_names == "") { speaker_names = speaker.name; } else { speaker_names = speaker_names + ", " + speaker.name; } } mSpeaker.setText(speaker_names); mHeader.setBackgroundColor(Color.parseColor(track.color)); mTitle.setText(event.title); mTitle.setTextColor(Color.parseColor(track.color_text)); mLocation.setText(location.name); DateFormat startFormat = new 
SimpleDateFormat("E, h:mm"); DateFormat endFormat = new SimpleDateFormat("h:mm a"); String timeString = startFormat.format(event.start) + " - " + endFormat.format(event.end); mTime.setText(timeString); mTimeLocation.setBackgroundColor(Color.parseColor(track.color_dark)); mDescription.setMovementMethod(LinkMovementMethod.getInstance()); mDescription.setText(Html.fromHtml(event.description)); show_description(); mDescriptionScroller.scrollTo(0, 0); } /** * overridden to hook back button when on the detail page */ public boolean onKeyDown(int keyCode, KeyEvent event){ if (mDetail && keyCode == KeyEvent.KEYCODE_BACK){ showList(); return true; } return super.onKeyDown(keyCode, event); } /* Creates the menu items */ public boolean onCreateOptionsMenu(Menu menu) { menu.add(0, MENU_PREV, 0, "Previous Day").setIcon(R.drawable.ic_menu_back); SubMenu dayMenu = menu.addSubMenu("Day").setIcon(android.R.drawable.ic_menu_today); DateFormat formatter = new SimpleDateFormat("EEEE, MMMM d"); Date date; for (int i=0; i<mDates.length; i++){ date = mDates[i]; dayMenu.add(0, MENU_DATE_BASE+i, 0, formatter.format(date)); } menu.add(0, MENU_NEXT, 0, "Next Day").setIcon(R.drawable.ic_menu_forward); menu.add(0, MENU_NOW, 0, "Now").setIcon(R.drawable.time); menu.add(0, MENU_REFRESH, 0, "Refresh").setIcon(R.drawable.ic_menu_refresh); menu.add(0, MENU_ABOUT, 0, "About").setIcon(android.R.drawable.ic_menu_info_details); return true; } /* Handles item selections */ public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); switch (id) { case MENU_NOW: now(); return true; case MENU_PREV: previous(); return true; case MENU_NEXT: next(); return true; case MENU_ABOUT: showDialog(DIALOG_ABOUT); return true; case MENU_REFRESH: if (mDetail) { mEvent = loadEvent(mEvent, true); loadDescriptionView(); } else { new SetDayThread(mCurrentDate, true).start(); } return true; default: if (id >= MENU_DATE_BASE) { // must be a date menu option. 
all dates // menu options are an index offset by MENU_DATE_BASE //closeOptionsMenu(); new SetDayThread(mDates[item.getItemId()-MENU_DATE_BASE]).start(); return true; } } return false; } public void setDay(Date date){ setDay(date, false); } /* sets the current day, filtering the list if need be */ public void setDay(Date date, boolean force) { if (isSameDay(mCurrentDate, date) && !force) { // same day, just jump to current time mHandler.post(new Runnable(){ public void run(){ mAdapter.now(mCurrentDate); } }); } else { // different day, update the list. Load the date requested // if it is not already loaded mCurrentDate = date; mAdapter.filterDay(date, force); } // take user back to the listings if not already there showList(); } /** * Jumps the user to right now in the event list: * * - if its before or after the conference, it shows the beginning * of day 1 * - if its during the conference it will show the first event * currently underway */ public void now(){ // use now, since it will have the time of day for // jumping to the right time Date now = new Date(); if (now.before(mDates[0]) || now.after(mConference.end)) { now = (Date) mDates[0].clone(); } new SetDayThread(now).start(); } /** * Jumps to the next day, if not already at the end */ public void next() { if (!isSameDay(mCurrentDate, mConference.end)) { Date load = new Date(mCurrentDate.getYear(), mCurrentDate.getMonth(), mCurrentDate.getDate()+1); new SetDayThread(load).start(); } } /** * Jumps to the previous day if now already at the beginning */ public void previous() { if (!isSameDay(mCurrentDate, mConference.start)) { Date load = new Date(mCurrentDate.getYear(), mCurrentDate.getMonth(), mCurrentDate.getDate()-1); new SetDayThread(load).start(); } } /** * Shows the event listing */ public void showList() { if (mDetail) { mFlipper.setInAnimation(mInLeft); mFlipper.setOutAnimation(mOutRight); mFlipper.showPrevious(); mDetail=false; } } /** * Loads the osbridge schedule * @param force - force reload */ 
private void loadSchedule(boolean force) { //XXX set date to a day that is definitely, not now. // This will cause it to update the list immediately. mCurrentDate = new Date(1900, 0, 0); DataService service = getDataService(); mConference = service.getConference(force); mDates = mConference.getDates(); } /** * Loads detailed info for an event. This should only be called * for events from the currently loaded day. * * @param event - partially loaded event from schedule object * @param force - force reload of data. */ private Event loadEvent(Event partialEvent, boolean force) { DataService service = getDataService(); Event event; // load detailed info if needed // update both the lists stored in the adapter if (!partialEvent.details || force){ event = service.getEvent(partialEvent.id, force); mAdapter.mFiltered.set(mAdapter.mFiltered.indexOf(partialEvent), event); mAdapter.mItems.set(mAdapter.mItems.indexOf(partialEvent), event); } else { event = partialEvent; } // preload the speakers for this event Speaker speaker; for(Integer sid: event.speaker_ids){ if (mSpeakers.containsKey(sid) && !force){ speaker = mSpeakers.get(sid); } else { speaker = getDataService().getSpeaker(sid, force); mSpeakers.put(sid, speaker); } } return event; } protected Dialog onCreateDialog(int id){ Context context = getApplicationContext(); switch (id) { case DIALOG_ABOUT: LayoutInflater inflater = (LayoutInflater) context.getSystemService(LAYOUT_INFLATER_SERVICE); View view = inflater.inflate(R.layout.about, null); AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle("About"); builder.setCancelable(true); builder.setView(view); builder.setIcon(android.R.drawable.ic_dialog_info); return builder.create(); case DIALOG_LOADING: ProgressDialog progressDialog; progressDialog = new ProgressDialog(this); progressDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER); progressDialog.setMessage("Loading..."); progressDialog.setCancelable(true); return progressDialog; default: 
return null; } } /** * EventAdapter used for displaying a list of events * */ private class EventAdapter extends ArrayAdapter<Event> { private List<Event> mItems; private List<Object> mFiltered; public EventAdapter(Context context, int textViewResourceId) { super(context, textViewResourceId); mItems = new ArrayList<Event>(); mFiltered = new ArrayList<Object>(); } public EventAdapter(Context context, int textViewResourceId, List<Event> items) { super(context, textViewResourceId, items); mItems = items; mFiltered = new ArrayList<Object>(); } /** * Sets elements to the current schedule. This will use * cached data if already loaded. Else it will load it from * the dataservice * @param date - date to filter by */ public void filterDay(Date date, boolean force){ // Load the data for the requested day, load it from dataservice if needed // construct a new date with just year,month,day since keys only have that set // XXX adjust for timezone by setting time to noon Date load = new Date(date.getYear(), date.getMonth(), date.getDate(), 12, 0); if (mSchedule.containsKey(load) && !force){ mItems = mSchedule.get(load).events; } else { mHandler.post(new Runnable(){ public void run(){showDialog(DIALOG_LOADING);} }); DataService service = getDataService(); Schedule schedule = service.getSchedule(load, force); mSchedule.put(load, schedule); mItems = schedule.events; } List<Event> items = mItems; List<Object> filtered = new ArrayList<Object>(); int size = mItems.size(); Date currentStart = null; for (int i=0; i<size; i++){ Event event = items.get(i); if(currentStart == null || event.start.after(currentStart)) { currentStart = event.start; filtered.add(currentStart); } filtered.add(event); } mFiltered = filtered; mLoadDate = date; mHandler.post(new Runnable(){ public void run(){ DateFormat formatter = new SimpleDateFormat("E, MMMM d"); mDate.setText(formatter.format(mCurrentDate)); notifyDataSetChanged(); now(mLoadDate); removeDialog(DIALOG_LOADING); } }); } /** * sets the position 
to the current time * @param date */ public void now(Date date) { List<Object> filtered = mFiltered; int size = filtered.size(); for (int i=0; i<size; i++){ Object item = filtered.get(i); // find either the first session that hasn't ended yet // or the first time marker that hasn't occured yet. if (item instanceof Date ){ Date slot = (Date) item; if (date.before(slot)) { mEvents.setSelection(i); return; } } else { Event event = (Event) item; if (event.end.after(date)) { // should display the time marker instead of the // session mEvents.setSelection(i-1); return; } } } // no current event was found, jump to the next day next(); } public int getCount(){ return mFiltered.size(); } /** * Renders an item in the schedule list */ public View getView(int position, View convertView, ViewGroup parent) { View v = convertView; LayoutInflater vi = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE); Object item = mFiltered.get(position); if (item instanceof Date) { Date date = (Date)item; v = vi.inflate(R.layout.list_slot, null); TextView time = (TextView) v.findViewById(R.id.time); DateFormat formatter = new SimpleDateFormat("h:mm a"); time.setText(formatter.format(date)); } else { Event e = (Event) item; v = vi.inflate(R.layout.listevent, null); if (e != null) { TextView title = (TextView) v.findViewById(R.id.title); TextView locationView = (TextView) v.findViewById(R.id.location); TextView time = (TextView) v.findViewById(R.id.time); if (title != null) { title.setText(e.title); } if (e.location != -1) { Location location = mConference.locations.get(e.location); locationView.setText(location.name); } if (time != null) { DateFormat formatter = new SimpleDateFormat("h:mm"); time.setText(formatter.format(e.start) + "-" + formatter.format(e.end)); } if (e.track != -1) { TextView track_view = (TextView) v.findViewById(R.id.track); Track track = mConference.tracks.get(e.track); track_view.setTextColor(Color.parseColor(track.color)); track_view.setText(track.name); } 
} } return v; } } /** * Checks if two dates are the same day * @param date1 * @param date2 * @return */ public static boolean isSameDay(Date date1, Date date2) { if (date1 == null || date2 == null) { throw new IllegalArgumentException("The date must not be null"); } Calendar cal1 = Calendar.getInstance(); cal1.setTime(date1); Calendar cal2 = Calendar.getInstance(); cal2.setTime(date2); return isSameDay(cal1, cal2); } /** * Checks if two calendars are the same day * @param cal1 * @param cal2 * @return */ public static boolean isSameDay(Calendar cal1, Calendar cal2) { if (cal1 == null || cal2 == null) { throw new IllegalArgumentException("The date must not be null"); } return (cal1.get(Calendar.ERA) == cal2.get(Calendar.ERA) && cal1.get(Calendar.YEAR) == cal2.get(Calendar.YEAR) && cal1.get(Calendar.DAY_OF_YEAR) == cal2.get(Calendar.DAY_OF_YEAR)); } /** * thread for setting schedule day. threaded so dialogs * can return immediately. */ class SetDayThread extends Thread { Date date; boolean reload; public SetDayThread(Date date) { this.date = date; this.reload = false; } public SetDayThread(Date date, boolean reload) { this.date = date; this.reload = reload; } public void run(){ try{ if (reload){ mHandler.post(new Runnable(){ public void run(){showDialog(DIALOG_LOADING);} }); // always reload the conference object when reloading loadSchedule(true); } setDay(date, reload); } catch (Exception e){ e.printStackTrace(); } } } }
replacing tabs with spaces
src/org/osb/ScheduleActivity.java
replacing tabs with spaces
Java
mit
6ce458a1566bf59048eb474faa4e0dc143f83c75
0
SUSE/saltstack-netapi-client-java,SUSE/salt-netapi-client,SUSE/saltstack-netapi-client-java,mbologna/salt-netapi-client,mbologna/salt-netapi-client,mbologna/saltstack-netapi-client-java,mbologna/saltstack-netapi-client-java
package com.suse.saltstack.netapi.client; import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; import static com.github.tomakehurst.wiremock.client.WireMock.any; import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; import static com.github.tomakehurst.wiremock.client.WireMock.equalToJson; import static com.github.tomakehurst.wiremock.client.WireMock.getRequestedFor; import static com.github.tomakehurst.wiremock.client.WireMock.post; import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor; import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; import static com.github.tomakehurst.wiremock.client.WireMock.urlMatching; import static com.github.tomakehurst.wiremock.client.WireMock.verify; import static com.suse.saltstack.netapi.AuthModule.AUTO; import static com.suse.saltstack.netapi.AuthModule.PAM; import static com.suse.saltstack.netapi.config.ClientConfig.SOCKET_TIMEOUT; import static org.hamcrest.CoreMatchers.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import com.suse.saltstack.netapi.calls.wheel.Key; import com.suse.saltstack.netapi.client.impl.JDKConnectionFactory; import com.suse.saltstack.netapi.datatypes.Job; import com.suse.saltstack.netapi.datatypes.ScheduledJob; import com.suse.saltstack.netapi.datatypes.Token; import com.suse.saltstack.netapi.datatypes.cherrypy.Stats; import com.suse.saltstack.netapi.datatypes.target.Glob; import com.suse.saltstack.netapi.exception.SaltStackException; import com.suse.saltstack.netapi.exception.SaltUserUnauthorizedException; import com.suse.saltstack.netapi.results.ResultInfo; import com.suse.saltstack.netapi.results.ResultInfoSet; import com.suse.saltstack.netapi.utils.ClientUtils; 
import com.github.tomakehurst.wiremock.junit.WireMockRule; import com.google.gson.JsonArray; import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; import com.google.gson.JsonSyntaxException; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.net.HttpURLConnection; import java.net.URI; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.TimeZone; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; /** * SaltStack API unit tests. */ public class SaltStackClientTest { private static final int MOCK_HTTP_PORT = 8888; static final String JSON_START_COMMAND_REQUEST = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/minions_request.json")); static final String JSON_START_COMMAND_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/minions_response.json")); static final String JSON_GET_MINIONS_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/get_minions_response.json")); static final String JSON_GET_MINION_DETAILS_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/minion_details_response.json")); static final String JSON_LOGIN_REQUEST = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/login_request.json")); static final String JSON_LOGIN_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/login_response.json")); static final String JSON_RUN_REQUEST = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/run_request.json")); static final String JSON_RUN_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/run_response.json")); static final String 
JSON_STATS_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/stats_response.json")); static final String JSON_KEYS_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/keys_response.json")); static final String JSON_JOBS_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/jobs_response.json")); static final String JSON_JOBS_RESPONSE_PENDING = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/jobs_response_pending.json")); static final String JSON_JOBS_RESPONSE_RESULT = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/jobs_response_result.json")); static final String JSON_JOBS_INVALID_START_TIME_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream( "/jobs_response_invalid_start_time.json")); static final String JSON_JOBS_NULL_START_TIME_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream( "/jobs_response_null_start_time.json")); static final String JSON_HOOK_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/hook_response.json")); static final String JSON_LOGOUT_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/logout_response.json")); @Rule public WireMockRule wireMockRule = new WireMockRule(MOCK_HTTP_PORT); private SaltStackClient client; @Rule public ExpectedException exception = ExpectedException.none(); @Before public void init() { URI uri = URI.create("http://localhost:" + Integer.toString(MOCK_HTTP_PORT)); client = new SaltStackClient(uri); } @Test public void testLoginOk() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_LOGIN_RESPONSE))); Token token = client.login("user", "pass", AUTO); verifyLoginToken(token); } @Test public void 
testLoginAsyncOk() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_LOGIN_RESPONSE))); Future<Token> futureToken = client.loginAsync("user", "pass", AUTO); Token token = futureToken.get(); verifyLoginToken(token); } private void verifyLoginToken(Token token) { verify(1, postRequestedFor(urlEqualTo("/login")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalToJson(JSON_LOGIN_REQUEST))); assertEquals("Token mismatch", "f248284b655724ca8a86bcab4b8df608ebf5b08b", token.getToken()); assertEquals("EAuth mismatch", "auto", token.getEauth()); assertEquals("User mismatch", "user", token.getUser()); assertEquals("Perms mismatch", Arrays.asList(".*", "@wheel"), token.getPerms()); } @Test(expected = SaltUserUnauthorizedException.class) public void testLoginFailure() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_UNAUTHORIZED))); client.login("user", "pass", AUTO); } @Test(expected = ExecutionException.class) public void testLoginAsyncFailure() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_UNAUTHORIZED))); Future<Token> futureToken = client.loginAsync("user", "pass", AUTO); Token token = futureToken.get(); assertNull(token); } @Test public void testRunRequest() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_RUN_RESPONSE))); List<Object> args = new ArrayList<>(); args.add("i3"); Map<String, Object> kwargs = new LinkedHashMap<>(); kwargs.put("refresh", "true"); kwargs.put("sysupgrade", "false"); Map<String, Object> retvals = client.run("user", "pass", PAM, "local", new Glob(), "pkg.install", args, kwargs); 
verifyRunResults(retvals); } @Test public void testRunRequestAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_RUN_RESPONSE))); List<Object> args = new ArrayList<>(); args.add("i3"); Map<String, Object> kwargs = new LinkedHashMap<>(); kwargs.put("refresh", "true"); kwargs.put("sysupgrade", "false"); Future<Map<String, Object>> future = client.runAsync("user", "pass", PAM, "local", new Glob(), "pkg.install", args, kwargs); Map<String, Object> retvals = future.get(); verifyRunResults(retvals); } private void verifyRunResults(Map<String, Object> retvals) { verify(1, postRequestedFor(urlEqualTo("/run")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalToJson(JSON_RUN_REQUEST))); LinkedHashMap<String, String> i3 = new LinkedHashMap<>(); i3.put("new", "4.10.3-1"); i3.put("old", ""); LinkedHashMap<String, String> i3lock = new LinkedHashMap<>(); i3lock.put("new", "2.7-1"); i3lock.put("old", ""); LinkedHashMap<String, String> i3status = new LinkedHashMap<>(); i3status.put("new", "2.9-2"); i3status.put("old", ""); Map<String, Map<String, String>> expected = new LinkedHashMap<>(); expected.put("i3", i3); expected.put("i3lock", i3lock); expected.put("i3status", i3status); assertNotNull(retvals); assertTrue(retvals.containsKey("minion-1")); assertEquals(expected, retvals.get("minion-1")); } @Test public void testRunRequestWithSocketTimeout() throws Exception { exception.expect(SaltStackException.class); exception.expectMessage(containsString("Read timed out")); // create a local SaltStackClient with a fast timeout configuration // to do not lock tests more than 2s URI uri = URI.create("http://localhost:" + Integer.toString(MOCK_HTTP_PORT)); SaltStackClient clientWithFastTimeout = new SaltStackClient(uri); clientWithFastTimeout.getConfig().put(SOCKET_TIMEOUT, 1000); 
stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withFixedDelay(2000))); clientWithFastTimeout.login("user", "pass", AUTO); } @Test public void testRunRequestWithSocketTimeoutThroughJDKConnection() throws Exception { exception.expect(SaltStackException.class); exception.expectMessage(containsString("Read timed out")); // create a local SaltStackClient with a fast timeout configuration // to do not lock tests more than 2s URI uri = URI.create("http://localhost:" + Integer.toString(MOCK_HTTP_PORT)); SaltStackClient clientWithFastTimeout = new SaltStackClient(uri, new JDKConnectionFactory()); clientWithFastTimeout.getConfig().put(SOCKET_TIMEOUT, 1000); stubFor(post(urlEqualTo("/login")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .willReturn(aResponse() .withFixedDelay(2000))); clientWithFastTimeout.login("user", "pass", AUTO); } @Test public void testGetMinions() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Accept", "application/json") .withBody(JSON_GET_MINIONS_RESPONSE))); Map<String, Map<String, Object>> minions = client.getMinions(); verifyMinions(minions); } @Test public void testGetMinionsAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Accept", "application/json") .withBody(JSON_GET_MINIONS_RESPONSE))); Future<Map<String, Map<String, Object>>> future = client.getMinionsAsync(); Map<String, Map<String, Object>> minions = future.get(); verifyMinions(minions); } @SuppressWarnings("unchecked") private void verifyMinions(Map<String, Map<String, Object>> minions) { verify(1, getRequestedFor(urlEqualTo("/minions")) .withHeader("Accept", equalTo("application/json"))); assertNotNull(minions); assertEquals(2, minions.size()); assertTrue(minions.containsKey("minion1")); assertTrue(minions.containsKey("minion2")); Map<String, Object> 
minion1 = minions.get("minion1"); assertEquals(56, minion1.size()); assertEquals("VirtualBox", minion1.get("biosversion")); assertTrue(minion1.get("saltversioninfo") instanceof List); List<String> saltVersionInfo = (List<String>) minion1.get("saltversioninfo"); assertEquals(2014.0, saltVersionInfo.get(0)); assertEquals(7.0, saltVersionInfo.get(1)); assertEquals(5.0, saltVersionInfo.get(2)); assertEquals(0.0, saltVersionInfo.get(3)); assertTrue(minion1.get("locale_info") instanceof Map); Map<String, String> localeInfo = ((Map<String, String>) minion1.get("locale_info")); assertEquals("en_US", localeInfo.get("defaultlanguage")); assertEquals("UTF-8", localeInfo.get("defaultencoding")); } @Test public void testGetMinionDetails() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Accept", "application/json") .withBody(JSON_GET_MINION_DETAILS_RESPONSE))); Map<String, Object> minion = client.getMinionDetails("minion2"); verifyMinionsDetails(minion); } @Test public void testGetMinionDetailsAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Accept", "application/json") .withBody(JSON_GET_MINION_DETAILS_RESPONSE))); Future<Map<String, Object>> future = client.getMinionDetailsAsync("minion2"); Map<String, Object> minion = future.get(); verifyMinionsDetails(minion); } @SuppressWarnings("unchecked") private void verifyMinionsDetails(Map<String, Object> minion) { verify(1, getRequestedFor(urlEqualTo("/minions/minion2")) .withHeader("Accept", equalTo("application/json"))); assertNotNull(minion); assertEquals(56, minion.size()); assertEquals("VirtualBox", minion.get("biosversion")); assertTrue(minion.get("saltversioninfo") instanceof List); List<String> saltVersionInfo = (List<String>) minion.get("saltversioninfo"); assertEquals(2014.0, saltVersionInfo.get(0)); assertEquals(7.0, saltVersionInfo.get(1)); assertEquals(5.0, 
saltVersionInfo.get(2)); assertEquals(0.0, saltVersionInfo.get(3)); assertTrue(minion.get("locale_info") instanceof Map); Map<String, String> localeInfo = ((Map<String, String>) minion.get("locale_info")); assertEquals("en_US", localeInfo.get("defaultlanguage")); assertEquals("UTF-8", localeInfo.get("defaultencoding")); } @Test public void testStartCommand() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_START_COMMAND_RESPONSE))); List<Object> args = new ArrayList<>(); args.add("i3"); Map<String, Object> kwargs = new LinkedHashMap<>(); kwargs.put("refresh", "true"); kwargs.put("sysupgrade", "false"); ScheduledJob job = client.startCommand(new Glob(), "pkg.install", args, kwargs); verify(1, postRequestedFor(urlEqualTo("/minions")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalToJson(JSON_START_COMMAND_REQUEST))); assertNotNull(job); assertEquals("20150211105524392307", job.getJid()); assertEquals(Arrays.asList("myminion"), job.getMinions()); } @Test public void testQueryJobResult() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_RESPONSE_RESULT))); Map<String, Object> retvals = client.getJobResult("some-job-id") .get(0).getResults(); verify(1, getRequestedFor(urlEqualTo("/jobs/some-job-id")) .withHeader("Accept", equalTo("application/json"))); LinkedHashMap<String, String> i3 = new LinkedHashMap<>(); i3.put("new", "4.10.3-1"); i3.put("old", ""); LinkedHashMap<String, String> i3lock = new LinkedHashMap<>(); i3lock.put("new", "2.7-1"); i3lock.put("old", ""); LinkedHashMap<String, String> i3status = new LinkedHashMap<>(); i3status.put("new", "2.9-2"); i3status.put("old", ""); Map<String, Map<String, String>> expected = new 
LinkedHashMap<>(); expected.put("i3", i3); expected.put("i3lock", i3lock); expected.put("i3status", i3status); assertNotNull(retvals); assertTrue(retvals.containsKey("minion-1")); assertEquals(expected, retvals.get("minion-1")); } @Test public void testStartCommandAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_START_COMMAND_RESPONSE))); List<Object> args = new ArrayList<>(); args.add("i3"); Map<String, Object> kwargs = new LinkedHashMap<>(); kwargs.put("refresh", "true"); kwargs.put("sysupgrade", "false"); Future<ScheduledJob> future = client.startCommandAsync(new Glob(), "pkg.install", args, kwargs); ScheduledJob job = future.get(); verify(1, postRequestedFor(urlEqualTo("/minions")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalToJson(JSON_START_COMMAND_REQUEST))); assertNotNull(job); assertEquals("20150211105524392307", job.getJid()); assertEquals(Arrays.asList("myminion"), job.getMinions()); } @Test public void testStats() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_STATS_RESPONSE))); Stats stats = client.stats(); assertNotNull(stats); verify(1, getRequestedFor(urlEqualTo("/stats")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void testStatsAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_STATS_RESPONSE))); Stats stats = client.statsAsync().get(); assertNotNull(stats); verify(1, getRequestedFor(urlEqualTo("/stats")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void testKeys() 
throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_KEYS_RESPONSE))); Key.Names keys = client.keys(); verifyKeys(keys); } @Test public void testKeysAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_KEYS_RESPONSE))); Key.Names keys = client.keysAsync().get(); verifyKeys(keys); } private void verifyKeys(Key.Names keys) { assertNotNull(keys); verify(1, getRequestedFor(urlEqualTo("/keys")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void testJobs() throws Exception { final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_RESPONSE))); Map<String, Job> jobs = client.getJobs(); assertNotNull(jobs); Job job1 = jobs.get("20150304192951636258"); Job job2 = jobs.get("20150304200110485012"); assertEquals(Arrays.asList("enable-autodestruction"), job2.getArguments().getArgs()); assertEquals(0, job1.getArguments().getArgs().size()); assertEquals("2015-03-04 19:29:51", DATE_FORMAT.format(job1.getStartTime())); assertEquals("2015-03-04 20:01:10", DATE_FORMAT.format(job2.getStartTime())); verify(1, getRequestedFor(urlEqualTo("/jobs")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void testJobsDiffTz() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_RESPONSE))); SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); TimeZone defaultTz = TimeZone.getDefault(); TimeZone tz = null; for (String zone 
: TimeZone.getAvailableIDs()) { tz = TimeZone.getTimeZone(zone); long diff = tz.getRawOffset() - df.getTimeZone().getRawOffset(); // Pick a TZ far enough from default to avoid possible DST issues if (Math.abs(diff) > 3600000 * 3) { break; } } Map<String, Job> jobs = client.getJobs(); Job job1 = jobs.get("20150304192951636258"); Job job2 = jobs.get("20150304200110485012"); assertEquals(df.parse("2015-03-04 19:29:51.636"), job1.getStartTime(defaultTz)); assertEquals(df.parse("2015-03-04 20:01:10.485"), job2.getStartTime(defaultTz)); assertEquals(df.parse("2015-03-04 19:29:51.636"), job1.getStartTime()); assertEquals(df.parse("2015-03-04 20:01:10.485"), job2.getStartTime()); df.setTimeZone(tz); assertEquals(df.parse("2015-03-04 19:29:51.636"), job1.getStartTime(tz)); assertEquals(df.parse("2015-03-04 20:01:10.485"), job2.getStartTime(tz)); assertNotEquals(job1.getStartTime(defaultTz), job1.getStartTime(tz)); assertNotEquals(job2.getStartTime(defaultTz), job2.getStartTime(tz)); verify(1, getRequestedFor(urlEqualTo("/jobs")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test(expected = JsonSyntaxException.class) public void testJobsWithInvalidStartTime() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_INVALID_START_TIME_RESPONSE))); client.getJobs(); } @Test public void testJobsWithNullStartTime() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_NULL_START_TIME_RESPONSE))); Map<String, Job> jobs = client.getJobs(); assertNotNull(jobs); Job job1 = jobs.get("20150304192951636258"); Job job2 = jobs.get("20150304200110485012"); assertNull(job1.getStartTime()); assertNull(job2.getStartTime()); assertNull(job1.getStartTime(TimeZone.getDefault())); 
assertNull(job2.getStartTime(TimeZone.getDefault())); verify(1, getRequestedFor(urlEqualTo("/jobs")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void testJobsAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_RESPONSE))); Map<String, Job> jobs = client.getJobsAsync().get(); assertNotNull(jobs); assertEquals(Arrays.asList("enable-autodestruction"), jobs.get("20150304200110485012").getArguments().getArgs()); verify(1, getRequestedFor(urlEqualTo("/jobs")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void testJobsPending() throws Exception { final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_RESPONSE_PENDING))); ResultInfoSet resultSet = client.getJobResult("some-job-id"); assertEquals(1, resultSet.size()); ResultInfo results = resultSet.get(0); HashSet<String> pendingMinions = new HashSet<String>(); pendingMinions.add("mira"); assertNotNull(results); assertEquals(0, results.getResults().size()); assertTrue(!results.getResult("mira").isPresent()); assertEquals("cmd.run", results.getFunction()); assertEquals("*", results.getTarget()); assertEquals("adamm", results.getUser()); assertEquals(pendingMinions, results.getMinions()); assertEquals(pendingMinions, results.getPendingMinions()); assertEquals("2015-08-06 16:55:13", DATE_FORMAT.format(results.getStartTime())); assertEquals("2015-08-06 16:55:13", DATE_FORMAT.format(results.getStartTime(DATE_FORMAT.getTimeZone()))); verify(1, getRequestedFor(urlEqualTo("/jobs/some-job-id")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void 
testSendEvent() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_HOOK_RESPONSE))); JsonObject json = new JsonObject(); json.addProperty("foo", "bar"); JsonArray array = new JsonArray(); array.add(new JsonPrimitive("one")); array.add(new JsonPrimitive("two")); array.add(new JsonPrimitive("three")); json.add("list", array); String data = json.toString(); boolean success = client.sendEvent("my/tag", data); assertTrue(success); verify(1, postRequestedFor(urlEqualTo("/hook/my/tag")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalTo(data))); } @Test public void testSendEventAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_HOOK_RESPONSE))); JsonObject json = new JsonObject(); json.addProperty("foo", "bar"); JsonArray array = new JsonArray(); array.add(new JsonPrimitive("one")); array.add(new JsonPrimitive("two")); array.add(new JsonPrimitive("three")); json.add("list", array); String data = json.toString(); boolean success = client.sendEventAsync("my/tag", data).get(); assertTrue(success); verify(1, postRequestedFor(urlEqualTo("/hook/my/tag")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalTo(data))); } @Test public void testLogout() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_LOGOUT_RESPONSE))); boolean success = client.logout(); verifyLogout(success); } @Test public void testLogoutAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) 
.withHeader("Content-Type", "application/json") .withBody(JSON_LOGOUT_RESPONSE))); boolean success = client.logoutAsync().get(); verifyLogout(success); } private void verifyLogout(boolean success) { assertTrue(success); verify(1, postRequestedFor(urlEqualTo("/logout")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalTo(""))); } }
src/test/java/com/suse/saltstack/netapi/client/SaltStackClientTest.java
package com.suse.saltstack.netapi.client; import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; import static com.github.tomakehurst.wiremock.client.WireMock.any; import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; import static com.github.tomakehurst.wiremock.client.WireMock.equalToJson; import static com.github.tomakehurst.wiremock.client.WireMock.getRequestedFor; import static com.github.tomakehurst.wiremock.client.WireMock.post; import static com.github.tomakehurst.wiremock.client.WireMock.postRequestedFor; import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; import static com.github.tomakehurst.wiremock.client.WireMock.urlMatching; import static com.github.tomakehurst.wiremock.client.WireMock.verify; import static com.suse.saltstack.netapi.AuthModule.AUTO; import static com.suse.saltstack.netapi.AuthModule.PAM; import static com.suse.saltstack.netapi.config.ClientConfig.SOCKET_TIMEOUT; import static org.hamcrest.CoreMatchers.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import com.suse.saltstack.netapi.calls.wheel.Key; import com.suse.saltstack.netapi.client.impl.JDKConnectionFactory; import com.suse.saltstack.netapi.datatypes.Job; import com.suse.saltstack.netapi.datatypes.ScheduledJob; import com.suse.saltstack.netapi.datatypes.Token; import com.suse.saltstack.netapi.datatypes.cherrypy.Stats; import com.suse.saltstack.netapi.datatypes.target.Glob; import com.suse.saltstack.netapi.exception.SaltStackException; import com.suse.saltstack.netapi.exception.SaltUserUnauthorizedException; import com.suse.saltstack.netapi.results.ResultInfo; import com.suse.saltstack.netapi.results.ResultInfoSet; import com.suse.saltstack.netapi.utils.ClientUtils; 
import com.github.tomakehurst.wiremock.junit.WireMockRule; import com.google.gson.JsonArray; import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; import com.google.gson.JsonSyntaxException; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.net.HttpURLConnection; import java.net.URI; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.TimeZone; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; /** * SaltStack API unit tests. */ public class SaltStackClientTest { private static final int MOCK_HTTP_PORT = 8888; static final String JSON_START_COMMAND_REQUEST = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/minions_request.json")); static final String JSON_START_COMMAND_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/minions_response.json")); static final String JSON_GET_MINIONS_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/get_minions_response.json")); static final String JSON_GET_MINION_DETAILS_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/minion_details_response.json")); static final String JSON_LOGIN_REQUEST = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/login_request.json")); static final String JSON_LOGIN_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/login_response.json")); static final String JSON_RUN_REQUEST = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/run_request.json")); static final String JSON_RUN_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/run_response.json")); static final String 
JSON_STATS_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/stats_response.json")); static final String JSON_KEYS_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/keys_response.json")); static final String JSON_JOBS_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/jobs_response.json")); static final String JSON_JOBS_RESPONSE_PENDING = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/jobs_response_pending.json")); static final String JSON_JOBS_RESPONSE_RESULT = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/jobs_response_result.json")); static final String JSON_JOBS_INVALID_START_TIME_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream( "/jobs_response_invalid_start_time.json")); static final String JSON_JOBS_NULL_START_TIME_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream( "/jobs_response_null_start_time.json")); static final String JSON_HOOK_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/hook_response.json")); static final String JSON_LOGOUT_RESPONSE = ClientUtils.streamToString( SaltStackClientTest.class.getResourceAsStream("/logout_response.json")); @Rule public WireMockRule wireMockRule = new WireMockRule(MOCK_HTTP_PORT); private SaltStackClient client; @Rule public ExpectedException exception = ExpectedException.none(); @Before public void init() { URI uri = URI.create("http://localhost:" + Integer.toString(MOCK_HTTP_PORT)); client = new SaltStackClient(uri); } @Test public void testLoginOk() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_LOGIN_RESPONSE))); Token token = client.login("user", "pass", AUTO); verifyLoginToken(token); } @Test public void 
testLoginAsyncOk() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_LOGIN_RESPONSE))); Future<Token> futureToken = client.loginAsync("user", "pass", AUTO); Token token = futureToken.get(); verifyLoginToken(token); } private void verifyLoginToken(Token token) { verify(1, postRequestedFor(urlEqualTo("/login")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalToJson(JSON_LOGIN_REQUEST))); assertEquals("Token mismatch", "f248284b655724ca8a86bcab4b8df608ebf5b08b", token.getToken()); assertEquals("EAuth mismatch", "auto", token.getEauth()); assertEquals("User mismatch", "user", token.getUser()); assertEquals("Perms mismatch", Arrays.asList(".*", "@wheel"), token.getPerms()); } @Test(expected = SaltUserUnauthorizedException.class) public void testLoginFailure() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_UNAUTHORIZED))); client.login("user", "pass", AUTO); } @Test(expected = ExecutionException.class) public void testLoginAsyncFailure() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_UNAUTHORIZED))); Future<Token> futureToken = client.loginAsync("user", "pass", AUTO); Token token = futureToken.get(); assertNull(token); } @Test public void testRunRequest() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_RUN_RESPONSE))); List<Object> args = new ArrayList<>(); args.add("i3"); Map<String, Object> kwargs = new LinkedHashMap<>(); kwargs.put("refresh", "true"); kwargs.put("sysupgrade", "false"); Map<String, Object> retvals = client.run("user", "pass", PAM, "local", new Glob(), "pkg.install", args, kwargs); 
verifyRunResults(retvals); } @Test public void testRunRequestAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_RUN_RESPONSE))); List<Object> args = new ArrayList<>(); args.add("i3"); Map<String, Object> kwargs = new LinkedHashMap<>(); kwargs.put("refresh", "true"); kwargs.put("sysupgrade", "false"); Future<Map<String, Object>> future = client.runAsync("user", "pass", PAM, "local", new Glob(), "pkg.install", args, kwargs); Map<String, Object> retvals = future.get(); verifyRunResults(retvals); } private void verifyRunResults(Map<String, Object> retvals) { verify(1, postRequestedFor(urlEqualTo("/run")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalToJson(JSON_RUN_REQUEST))); LinkedHashMap<String, String> i3 = new LinkedHashMap<>(); i3.put("new", "4.10.3-1"); i3.put("old", ""); LinkedHashMap<String, String> i3lock = new LinkedHashMap<>(); i3lock.put("new", "2.7-1"); i3lock.put("old", ""); LinkedHashMap<String, String> i3status = new LinkedHashMap<>(); i3status.put("new", "2.9-2"); i3status.put("old", ""); Map<String, Map<String, String>> expected = new LinkedHashMap<>(); expected.put("i3", i3); expected.put("i3lock", i3lock); expected.put("i3status", i3status); assertNotNull(retvals); assertTrue(retvals.containsKey("minion-1")); assertEquals(expected, retvals.get("minion-1")); } @Test public void testRunRequestWithSocketTimeout() throws Exception { exception.expect(SaltStackException.class); exception.expectMessage(containsString("Read timed out")); // create a local SaltStackClient with a fast timeout configuration // to do not lock tests more than 2s URI uri = URI.create("http://localhost:" + Integer.toString(MOCK_HTTP_PORT)); SaltStackClient clientWithFastTimeout = new SaltStackClient(uri); clientWithFastTimeout.getConfig().put(SOCKET_TIMEOUT, 1000); 
stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withFixedDelay(2000))); clientWithFastTimeout.login("user", "pass", AUTO); } @Test public void testRunRequestWithSocketTimeoutThroughJDKConnection() throws Exception { exception.expect(SaltStackException.class); exception.expectMessage(containsString("Read timed out")); // create a local SaltStackClient with a fast timeout configuration // to do not lock tests more than 2s URI uri = URI.create("http://localhost:" + Integer.toString(MOCK_HTTP_PORT)); SaltStackClient clientWithFastTimeout = new SaltStackClient(uri, new JDKConnectionFactory()); clientWithFastTimeout.getConfig().put(SOCKET_TIMEOUT, 1000); stubFor(post(urlEqualTo("/login")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .willReturn(aResponse() .withFixedDelay(2000))); clientWithFastTimeout.login("user", "pass", AUTO); } @Test @SuppressWarnings("unchecked") public void testGetMinions() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Accept", "application/json") .withBody(JSON_GET_MINIONS_RESPONSE))); Map<String, Map<String, Object>> minions = client.getMinions(); verifyMinions(minions); } @Test @SuppressWarnings("unchecked") public void testGetMinionsAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Accept", "application/json") .withBody(JSON_GET_MINIONS_RESPONSE))); Future<Map<String, Map<String, Object>>> future = client.getMinionsAsync(); Map<String, Map<String, Object>> minions = future.get(); verifyMinions(minions); } private void verifyMinions(Map<String, Map<String, Object>> minions) { verify(1, getRequestedFor(urlEqualTo("/minions")) .withHeader("Accept", equalTo("application/json"))); assertNotNull(minions); assertEquals(2, minions.size()); assertTrue(minions.containsKey("minion1")); 
assertTrue(minions.containsKey("minion2")); Map<String, Object> minion1 = minions.get("minion1"); assertEquals(56, minion1.size()); assertEquals("VirtualBox", minion1.get("biosversion")); assertTrue(minion1.get("saltversioninfo") instanceof List); List<String> saltVersionInfo = (List<String>) minion1.get("saltversioninfo"); assertEquals(2014.0, saltVersionInfo.get(0)); assertEquals(7.0, saltVersionInfo.get(1)); assertEquals(5.0, saltVersionInfo.get(2)); assertEquals(0.0, saltVersionInfo.get(3)); assertTrue(minion1.get("locale_info") instanceof Map); Map<String, String> localeInfo = ((Map<String, String>) minion1.get("locale_info")); assertEquals("en_US", localeInfo.get("defaultlanguage")); assertEquals("UTF-8", localeInfo.get("defaultencoding")); } @Test @SuppressWarnings("unchecked") public void testGetMinionDetails() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Accept", "application/json") .withBody(JSON_GET_MINION_DETAILS_RESPONSE))); Map<String, Object> minion = client.getMinionDetails("minion2"); verifyMinionsDetails(minion); } @Test @SuppressWarnings("unchecked") public void testGetMinionDetailsAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Accept", "application/json") .withBody(JSON_GET_MINION_DETAILS_RESPONSE))); Future<Map<String, Object>> future = client.getMinionDetailsAsync("minion2"); Map<String, Object> minion = future.get(); verifyMinionsDetails(minion); } private void verifyMinionsDetails(Map<String, Object> minion) { verify(1, getRequestedFor(urlEqualTo("/minions/minion2")) .withHeader("Accept", equalTo("application/json"))); assertNotNull(minion); assertEquals(56, minion.size()); assertEquals("VirtualBox", minion.get("biosversion")); assertTrue(minion.get("saltversioninfo") instanceof List); List<String> saltVersionInfo = (List<String>) minion.get("saltversioninfo"); 
assertEquals(2014.0, saltVersionInfo.get(0)); assertEquals(7.0, saltVersionInfo.get(1)); assertEquals(5.0, saltVersionInfo.get(2)); assertEquals(0.0, saltVersionInfo.get(3)); assertTrue(minion.get("locale_info") instanceof Map); Map<String, String> localeInfo = ((Map<String, String>) minion.get("locale_info")); assertEquals("en_US", localeInfo.get("defaultlanguage")); assertEquals("UTF-8", localeInfo.get("defaultencoding")); } @Test public void testStartCommand() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_START_COMMAND_RESPONSE))); List<Object> args = new ArrayList<>(); args.add("i3"); Map<String, Object> kwargs = new LinkedHashMap<>(); kwargs.put("refresh", "true"); kwargs.put("sysupgrade", "false"); ScheduledJob job = client.startCommand(new Glob(), "pkg.install", args, kwargs); verify(1, postRequestedFor(urlEqualTo("/minions")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalToJson(JSON_START_COMMAND_REQUEST))); assertNotNull(job); assertEquals("20150211105524392307", job.getJid()); assertEquals(Arrays.asList("myminion"), job.getMinions()); } @Test public void testQueryJobResult() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_RESPONSE_RESULT))); Map<String, Object> retvals = client.getJobResult("some-job-id") .get(0).getResults(); verify(1, getRequestedFor(urlEqualTo("/jobs/some-job-id")) .withHeader("Accept", equalTo("application/json"))); LinkedHashMap<String, String> i3 = new LinkedHashMap<>(); i3.put("new", "4.10.3-1"); i3.put("old", ""); LinkedHashMap<String, String> i3lock = new LinkedHashMap<>(); i3lock.put("new", "2.7-1"); i3lock.put("old", ""); LinkedHashMap<String, String> i3status = new LinkedHashMap<>(); 
i3status.put("new", "2.9-2"); i3status.put("old", ""); Map<String, Map<String, String>> expected = new LinkedHashMap<>(); expected.put("i3", i3); expected.put("i3lock", i3lock); expected.put("i3status", i3status); assertNotNull(retvals); assertTrue(retvals.containsKey("minion-1")); assertEquals(expected, retvals.get("minion-1")); } @Test public void testStartCommandAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_START_COMMAND_RESPONSE))); List<Object> args = new ArrayList<>(); args.add("i3"); Map<String, Object> kwargs = new LinkedHashMap<>(); kwargs.put("refresh", "true"); kwargs.put("sysupgrade", "false"); Future<ScheduledJob> future = client.startCommandAsync(new Glob(), "pkg.install", args, kwargs); ScheduledJob job = future.get(); verify(1, postRequestedFor(urlEqualTo("/minions")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalToJson(JSON_START_COMMAND_REQUEST))); assertNotNull(job); assertEquals("20150211105524392307", job.getJid()); assertEquals(Arrays.asList("myminion"), job.getMinions()); } @Test public void testStats() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_STATS_RESPONSE))); Stats stats = client.stats(); assertNotNull(stats); verify(1, getRequestedFor(urlEqualTo("/stats")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void testStatsAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_STATS_RESPONSE))); Stats stats = client.statsAsync().get(); assertNotNull(stats); verify(1, getRequestedFor(urlEqualTo("/stats")) 
.withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void testKeys() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_KEYS_RESPONSE))); Key.Names keys = client.keys(); verifyKeys(keys); } @Test public void testKeysAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_KEYS_RESPONSE))); Key.Names keys = client.keysAsync().get(); verifyKeys(keys); } private void verifyKeys(Key.Names keys) { assertNotNull(keys); verify(1, getRequestedFor(urlEqualTo("/keys")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void testJobs() throws Exception { final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_RESPONSE))); Map<String, Job> jobs = client.getJobs(); assertNotNull(jobs); Job job1 = jobs.get("20150304192951636258"); Job job2 = jobs.get("20150304200110485012"); assertEquals(Arrays.asList("enable-autodestruction"), job2.getArguments().getArgs()); assertEquals(0, job1.getArguments().getArgs().size()); assertEquals("2015-03-04 19:29:51", DATE_FORMAT.format(job1.getStartTime())); assertEquals("2015-03-04 20:01:10", DATE_FORMAT.format(job2.getStartTime())); verify(1, getRequestedFor(urlEqualTo("/jobs")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void testJobsDiffTz() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_RESPONSE))); SimpleDateFormat df = new 
SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); TimeZone defaultTz = TimeZone.getDefault(); TimeZone tz = null; for (String zone : TimeZone.getAvailableIDs()) { tz = TimeZone.getTimeZone(zone); long diff = tz.getRawOffset() - df.getTimeZone().getRawOffset(); // Pick a TZ far enough from default to avoid possible DST issues if (Math.abs(diff) > 3600000 * 3) { break; } } Map<String, Job> jobs = client.getJobs(); Job job1 = jobs.get("20150304192951636258"); Job job2 = jobs.get("20150304200110485012"); assertEquals(df.parse("2015-03-04 19:29:51.636"), job1.getStartTime(defaultTz)); assertEquals(df.parse("2015-03-04 20:01:10.485"), job2.getStartTime(defaultTz)); assertEquals(df.parse("2015-03-04 19:29:51.636"), job1.getStartTime()); assertEquals(df.parse("2015-03-04 20:01:10.485"), job2.getStartTime()); df.setTimeZone(tz); assertEquals(df.parse("2015-03-04 19:29:51.636"), job1.getStartTime(tz)); assertEquals(df.parse("2015-03-04 20:01:10.485"), job2.getStartTime(tz)); assertNotEquals(job1.getStartTime(defaultTz), job1.getStartTime(tz)); assertNotEquals(job2.getStartTime(defaultTz), job2.getStartTime(tz)); verify(1, getRequestedFor(urlEqualTo("/jobs")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test(expected = JsonSyntaxException.class) public void testJobsWithInvalidStartTime() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_INVALID_START_TIME_RESPONSE))); client.getJobs(); } @Test public void testJobsWithNullStartTime() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_NULL_START_TIME_RESPONSE))); Map<String, Job> jobs = client.getJobs(); assertNotNull(jobs); Job job1 = jobs.get("20150304192951636258"); Job job2 = jobs.get("20150304200110485012"); 
assertNull(job1.getStartTime()); assertNull(job2.getStartTime()); assertNull(job1.getStartTime(TimeZone.getDefault())); assertNull(job2.getStartTime(TimeZone.getDefault())); verify(1, getRequestedFor(urlEqualTo("/jobs")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void testJobsAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_RESPONSE))); Map<String, Job> jobs = client.getJobsAsync().get(); assertNotNull(jobs); assertEquals(Arrays.asList("enable-autodestruction"), jobs.get("20150304200110485012").getArguments().getArgs()); verify(1, getRequestedFor(urlEqualTo("/jobs")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void testJobsPending() throws Exception { final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_JOBS_RESPONSE_PENDING))); ResultInfoSet resultSet = client.getJobResult("some-job-id"); assertEquals(1, resultSet.size()); ResultInfo results = resultSet.get(0); HashSet<String> pendingMinions = new HashSet<String>(); pendingMinions.add("mira"); assertNotNull(results); assertEquals(0, results.getResults().size()); assertTrue(!results.getResult("mira").isPresent()); assertEquals("cmd.run", results.getFunction()); assertEquals("*", results.getTarget()); assertEquals("adamm", results.getUser()); assertEquals(pendingMinions, results.getMinions()); assertEquals(pendingMinions, results.getPendingMinions()); assertEquals("2015-08-06 16:55:13", DATE_FORMAT.format(results.getStartTime())); assertEquals("2015-08-06 16:55:13", DATE_FORMAT.format(results.getStartTime(DATE_FORMAT.getTimeZone()))); verify(1, 
getRequestedFor(urlEqualTo("/jobs/some-job-id")) .withHeader("Accept", equalTo("application/json")) .withRequestBody(equalTo(""))); } @Test public void testSendEvent() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_HOOK_RESPONSE))); JsonObject json = new JsonObject(); json.addProperty("foo", "bar"); JsonArray array = new JsonArray(); array.add(new JsonPrimitive("one")); array.add(new JsonPrimitive("two")); array.add(new JsonPrimitive("three")); json.add("list", array); String data = json.toString(); boolean success = client.sendEvent("my/tag", data); assertTrue(success); verify(1, postRequestedFor(urlEqualTo("/hook/my/tag")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalTo(data))); } @Test public void testSendEventAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_HOOK_RESPONSE))); JsonObject json = new JsonObject(); json.addProperty("foo", "bar"); JsonArray array = new JsonArray(); array.add(new JsonPrimitive("one")); array.add(new JsonPrimitive("two")); array.add(new JsonPrimitive("three")); json.add("list", array); String data = json.toString(); boolean success = client.sendEventAsync("my/tag", data).get(); assertTrue(success); verify(1, postRequestedFor(urlEqualTo("/hook/my/tag")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalTo(data))); } @Test public void testLogout() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_LOGOUT_RESPONSE))); boolean success = client.logout(); verifyLogout(success); } @Test public void 
testLogoutAsync() throws Exception { stubFor(any(urlMatching(".*")) .willReturn(aResponse() .withStatus(HttpURLConnection.HTTP_OK) .withHeader("Content-Type", "application/json") .withBody(JSON_LOGOUT_RESPONSE))); boolean success = client.logoutAsync().get(); verifyLogout(success); } private void verifyLogout(boolean success) { assertTrue(success); verify(1, postRequestedFor(urlEqualTo("/logout")) .withHeader("Accept", equalTo("application/json")) .withHeader("Content-Type", equalTo("application/json")) .withRequestBody(equalTo(""))); } }
Move annotations after refactoring accordingly
src/test/java/com/suse/saltstack/netapi/client/SaltStackClientTest.java
Move annotations after refactoring accordingly
Java
epl-1.0
7342e6d4dfca2cd3d499bedbb7a7c82c839232fa
0
gnuarmeclipse/plug-ins,gnuarmeclipse/plug-ins,gnuarmeclipse/plug-ins,gnuarmeclipse/plug-ins,gnuarmeclipse/plug-ins
/*******************************************************************************
 * Copyright (c) 2014 Liviu Ionescu.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     Liviu Ionescu - initial version
 *******************************************************************************/

package ilg.gnumcueclipse.debug.gdbjtag.datamodel;

import java.math.BigInteger;
import java.util.LinkedList;
import java.util.List;

import ilg.gnumcueclipse.packs.core.tree.Leaf;
import ilg.gnumcueclipse.packs.core.tree.Node;

/**
 * As per SVD 1.1, <i>"A cluster describes a sequence of registers within a
 * peripheral. A cluster has an base offset relative to the base address of the
 * peripheral. All registers within a cluster specify their address offset
 * relative to the cluster base address. Register and cluster sections can occur
 * in an arbitrary order."</i>
 */
public class SvdClusterDMNode extends SvdDMNode {

	// ------------------------------------------------------------------------

	/**
	 * Wrap the given SVD tree node as a cluster data-model node.
	 *
	 * @param node
	 *            the underlying {@code <cluster>} tree leaf.
	 */
	public SvdClusterDMNode(Leaf node) {
		super(node);
	}

	@Override
	public void dispose() {
		super.dispose();
	}

	// ------------------------------------------------------------------------

	/**
	 * Collect the data-model children of this cluster.
	 * <p>
	 * Direct {@code <register>} and {@code <cluster>} children are always
	 * kept; for xPack-style packages the children may additionally be grouped
	 * under {@code <registers>} / {@code <clusters>} container elements, which
	 * are flattened here. Apparition order is preserved.
	 *
	 * @param node
	 *            the tree node whose children are inspected; may be null.
	 * @return the children wrapped as data-model nodes, or null when there are
	 *         none.
	 */
	@Override
	protected SvdObjectDMNode[] prepareChildren(Leaf node) {

		if (node == null || !node.hasChildren()) {
			return null;
		}

		List<SvdObjectDMNode> list = new LinkedList<>();
		for (Leaf child : ((Node) node).getChildren()) {

			// Keep only <register> and <cluster> nodes.
			if (child.isType("register")) {
				list.add(new SvdRegisterDMNode(child));
			} else if (child.isType("cluster")) {
				list.add(new SvdClusterDMNode(child));
			}
		}

		if (getNode().getPackType() == Node.PACK_TYPE_XPACK) {

			// xPacks group registers under a <registers> container.
			Leaf group = ((Node) node).findChild("registers");
			if (group != null && group.hasChildren()) {
				for (Leaf child : ((Node) group).getChildren()) {

					// Keep only <register> and <cluster> nodes.
					if (child.isType("register")) {
						list.add(new SvdRegisterDMNode(child));
					} else if (child.isType("cluster")) {
						list.add(new SvdClusterDMNode(child));
					}
				}
			}

			// Nested clusters may be grouped under a <clusters> container.
			Leaf clusters = ((Node) node).findChild("clusters");
			if (clusters != null && clusters.hasChildren()) {
				for (Leaf child : ((Node) clusters).getChildren()) {
					if (child.isType("cluster")) {
						list.add(new SvdClusterDMNode(child));
					}
				}
			}
		}

		// Preserve apparition order.
		return list.toArray(new SvdObjectDMNode[list.size()]);
	}

	/**
	 * Parse the cluster's {@code addressOffset} property, relative to the
	 * peripheral base address.
	 *
	 * @return the parsed offset, or {@link BigInteger#ZERO} when the property
	 *         is absent.
	 */
	public BigInteger getBigAddressOffset() {

		String str = getNode().getProperty("addressOffset");
		if (!str.isEmpty()) {
			return SvdUtils.parseScaledNonNegativeBigInteger(str);
		} else {
			return BigInteger.ZERO;
		}
	}

	/**
	 * Compute the address increment used when this cluster is repeated as an
	 * array element.
	 *
	 * @return the non-zero array address increment, or null when there is no
	 *         (or a zero) increment.
	 */
	@Override
	public BigInteger getBigRepeatIncrement() {

		BigInteger bigRepeatIncrement = getBigArrayAddressIncrement();

		// Bug fix: BigInteger must be compared by value, not with '!='.
		// getBigArrayAddressIncrement() may return a parsed zero instance
		// that is a different object than the BigInteger.ZERO constant, in
		// which case the old reference comparison wrongly reported a
		// non-zero increment. signum() compares the numeric value; the null
		// guard keeps a null result mapping to null as before.
		if (bigRepeatIncrement != null && bigRepeatIncrement.signum() != 0) {
			return bigRepeatIncrement;
		}
		return null;
	}

	// ------------------------------------------------------------------------
}
bundles/ilg.gnumcueclipse.debug.gdbjtag/src/ilg/gnumcueclipse/debug/gdbjtag/datamodel/SvdClusterDMNode.java
/*******************************************************************************
 * Copyright (c) 2014 Liviu Ionescu.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     Liviu Ionescu - initial version
 *******************************************************************************/

package ilg.gnumcueclipse.debug.gdbjtag.datamodel;

import java.math.BigInteger;
import java.util.LinkedList;
import java.util.List;

import ilg.gnumcueclipse.packs.core.tree.Leaf;
import ilg.gnumcueclipse.packs.core.tree.Node;

/**
 * As per SVD 1.1, <i>"A cluster describes a sequence of registers within a
 * peripheral. A cluster has an base offset relative to the base address of the
 * peripheral. All registers within a cluster specify their address offset
 * relative to the cluster base address. Register and cluster sections can occur
 * in an arbitrary order."</i>
 */
public class SvdClusterDMNode extends SvdDMNode {

	// ------------------------------------------------------------------------

	/**
	 * Wrap the given SVD tree node as a cluster data-model node.
	 *
	 * @param node
	 *            the underlying {@code <cluster>} tree leaf.
	 */
	public SvdClusterDMNode(Leaf node) {
		super(node);
	}

	@Override
	public void dispose() {
		super.dispose();
	}

	// ------------------------------------------------------------------------

	/**
	 * Collect the data-model children of this cluster: direct
	 * {@code <register>} and {@code <cluster>} children are kept; for
	 * xPack-type packages ({@code Node.PACK_TYPE_XPACK}) children grouped
	 * under {@code <registers>} and {@code <clusters>} containers are also
	 * flattened in.
	 *
	 * @param node
	 *            the tree node whose children are inspected; may be null.
	 * @return the children wrapped as data-model nodes, or null when there
	 *         are none.
	 */
	@Override
	protected SvdObjectDMNode[] prepareChildren(Leaf node) {
		if (node == null || !node.hasChildren()) {
			return null;
		}

		// System.out.println("prepareChildren(" + node.getName() +
		// ")");

		List<SvdObjectDMNode> list = new LinkedList<SvdObjectDMNode>();
		for (Leaf child : ((Node) node).getChildren()) {

			// Keep only <register> and <cluster> nodes
			if (child.isType("register")) {
				list.add(new SvdRegisterDMNode(child));
			} else if (child.isType("cluster")) {
				list.add(new SvdClusterDMNode(child));
			}
		}

		if (getNode().getPackType() == Node.PACK_TYPE_XPACK) {
			// xPacks may group registers under a <registers> container.
			Leaf group = ((Node) node).findChild("registers");
			if (group != null && group.hasChildren()) {
				for (Leaf child : ((Node) group).getChildren()) {

					// Keep only <register> and <cluster> nodes
					if (child.isType("register")) {
						list.add(new SvdRegisterDMNode(child));
					} else if (child.isType("cluster")) {
						list.add(new SvdClusterDMNode(child));
					}
				}
			}
			// Nested clusters may be grouped under a <clusters> container;
			// only <cluster> children are kept here.
			Leaf clusters = ((Node) node).findChild("clusters");
			if (clusters != null && clusters.hasChildren()) {
				for (Leaf child : ((Node) clusters).getChildren()) {
					if (child.isType("cluster")) {
						list.add(new SvdClusterDMNode(child));
					}
				}
			}
		}

		SvdObjectDMNode[] array = list.toArray(new SvdObjectDMNode[list.size()]);

		// Preserve apparition order.
		return array;
	}

	/**
	 * Parse the cluster's {@code addressOffset} property, relative to the
	 * peripheral base address.
	 * <p>
	 * NOTE(review): parsing is delegated to
	 * {@code SvdUtils.parseScaledNonNegativeBigInteger()}; the accepted
	 * scaled-number formats are defined there, not visible here.
	 *
	 * @return the parsed offset, or {@link BigInteger#ZERO} when the property
	 *         is absent.
	 */
	public BigInteger getBigAddressOffset() {
		String str = getNode().getProperty("addressOffset");
		if (!str.isEmpty()) {
			return SvdUtils.parseScaledNonNegativeBigInteger(str);
		} else {
			return BigInteger.ZERO;
		}
	}

	// ------------------------------------------------------------------------
}
[#336] fix cluster increment
bundles/ilg.gnumcueclipse.debug.gdbjtag/src/ilg/gnumcueclipse/debug/gdbjtag/datamodel/SvdClusterDMNode.java
[#336] fix cluster increment
Java
agpl-3.0
0caddf8326b4b419d27824019961d830674bec45
0
RapidInfoSys/Rapid,RapidInfoSys/Rapid,RapidInfoSys/Rapid,RapidInfoSys/Rapid
/* Copyright (C) 2019 - Gareth Edwards / Rapid Information Systems [email protected] This file is part of the Rapid Application Platform Rapid is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. The terms require you to include the original copyright, and the license notice in all redistributions. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License in a file named "COPYING". If not, see <http://www.gnu.org/licenses/>. */ package com.rapid.server; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileOutputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.FilenameFilter; import java.io.IOException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.sql.Driver; import java.sql.DriverManager; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Scanner; import java.util.Set; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import javax.servlet.ServletContext; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; import javax.xml.XMLConstants; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import 
javax.xml.transform.TransformerFactoryConfigurationError; import javax.xml.transform.stream.StreamSource; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import javax.xml.validation.Validator; import javax.xml.xpath.XPathExpressionException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.xml.sax.SAXException; import com.rapid.core.Action; import com.rapid.core.Application; import com.rapid.core.Application.RapidLoadingException; import com.rapid.core.Applications; import com.rapid.core.Applications.Versions; import com.rapid.core.Device.Devices; import com.rapid.core.Email; import com.rapid.core.Process; import com.rapid.core.Theme; import com.rapid.core.Workflow; import com.rapid.core.Workflows; import com.rapid.utils.Classes; import com.rapid.utils.Comparators; import com.rapid.utils.Encryption.EncryptionProvider; import com.rapid.utils.Files; import com.rapid.utils.Https; import com.rapid.utils.JAXB.EncryptedXmlAdapter; import com.rapid.utils.Strings; public class RapidServletContextListener implements ServletContextListener { // the logger which we will initialise private static Logger _logger; // the schema factory that we will load the actions and controls schemas into private static SchemaFactory _schemaFactory; // all of the classes we are going to put into our jaxb context private static ArrayList<Class> _jaxbClasses; // enterprise monitor protected static Monitor _monitor = new Monitor(); // public static methods public static void logFileNames(File dir, String rootPath) { for (File file : dir.listFiles()) { if (file.isDirectory()) { logFileNames(file, rootPath); } else { String fileName = file.toString(); _logger.info(fileName.substring(rootPath.length())); } } } public static int loadLogins(ServletContext servletContext) throws Exception { int loginCount = 0; // get the directory in which 
the control xml files are stored File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/logins/"); // if the directory exists if (dir.exists()) { // create an array list of json objects to hold the logins ArrayList<JSONObject> logins = new ArrayList<>(); // create a filter for finding .control.xml files FilenameFilter xmlFilenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".login.xml"); } }; // create a schema object for the xsd Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/login.xsd")); // create a validator Validator validator = schema.newValidator(); // loop the xml files in the folder for (File xmlFile : dir.listFiles(xmlFilenameFilter)) { // read the xml into a string String xml = Strings.getString(xmlFile); // validate the control xml file against the schema validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // convert the string into JSON JSONObject jsonLogin = org.json.XML.toJSONObject(xml).getJSONObject("login"); // add to array list logins.add(jsonLogin); // increment the count loginCount++; } // put the logins in a context attribute (this is available to the security adapters on initialisation) servletContext.setAttribute("jsonLogins", logins); } _logger.info(loginCount + " logins loaded from .login.xml files"); return loginCount; } public static int loadDatabaseDrivers(ServletContext servletContext) throws Exception { // create a schema object for the xsd Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/databaseDrivers.xsd")); // create a validator Validator validator = schema.newValidator(); // read the xml into a string String xml = Strings.getString(new File(servletContext.getRealPath("/") + "/WEB-INF/database/" + "/databaseDrivers.xml")); // validate the control xml file against the schema 
validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // convert the xml string into JSON JSONObject jsonDatabaseDriverCollection = org.json.XML.toJSONObject(xml).getJSONObject("databaseDrivers"); // prepare the array we are going to popoulate JSONArray jsonDatabaseDrivers = new JSONArray(); JSONObject jsonDatabaseDriver; int index = 0; int count = 0; if (jsonDatabaseDriverCollection.optJSONArray("databaseDriver") == null) { jsonDatabaseDriver = jsonDatabaseDriverCollection.getJSONObject("databaseDriver"); } else { jsonDatabaseDriver = jsonDatabaseDriverCollection.getJSONArray("databaseDriver").getJSONObject(index); count = jsonDatabaseDriverCollection.getJSONArray("databaseDriver").length(); } do { _logger.info("Registering database driver " + jsonDatabaseDriver.getString("name") + " using " + jsonDatabaseDriver.getString("class")); try { // check this type does not already exist for (int i = 0; i < jsonDatabaseDrivers.length(); i++) { if (jsonDatabaseDriver.getString("name").equals(jsonDatabaseDrivers.getJSONObject(i).getString("name"))) throw new Exception(" database driver type is loaded already. 
Type names must be unique"); } // get the class name String className = jsonDatabaseDriver.getString("class"); // get the current thread class loader (this should log better if there are any issues) ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); // check we got a class loader if (classLoader == null) { // register the class the old fashioned way so the DriverManager can find it Class.forName(className); } else { // register the class on this thread so we can catch any errors Class.forName(className, true, classLoader); } // add the jsonControl to our array jsonDatabaseDrivers.put(jsonDatabaseDriver); } catch (Exception ex) { _logger.error("Error registering database driver : " + ex.getMessage(), ex); } // inc the count of controls in this file index++; // get the next one if (index < count) jsonDatabaseDriver = jsonDatabaseDriverCollection.getJSONArray("databaseDriver").getJSONObject(index); } while (index < count); // put the jsonControls in a context attribute (this is available via the getJsonActions method in RapidHttpServlet) servletContext.setAttribute("jsonDatabaseDrivers", jsonDatabaseDrivers); _logger.info(index + " database drivers loaded from databaseDrivers.xml file"); return index; } // loop all of the .connectionAdapter.xml files and check the injectable classes, so we can re-initialise JAXB context to be able to serialise them, and cache their constructors for speedy initialisation public static int loadConnectionAdapters(ServletContext servletContext) throws Exception { int adapterCount = 0; // retain our class constructors in a hashtable - this speeds up initialisation HashMap<String,Constructor> connectionConstructors = new HashMap<>(); // create an array list of json objects which we will sort later according to the order ArrayList<JSONObject> connectionAdapters = new ArrayList<>(); // get the directory in which the control xml files are stored File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/database/"); 
// create a filter for finding .control.xml files FilenameFilter xmlFilenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".connectionadapter.xml"); } }; // create a schema object for the xsd Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/connectionAdapter.xsd")); // create a validator Validator validator = schema.newValidator(); // loop the xml files in the folder for (File xmlFile : dir.listFiles(xmlFilenameFilter)) { // read the xml into a string String xml = Strings.getString(xmlFile); // validate the control xml file against the schema validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // convert the string into JSON JSONObject jsonConnectionAdapter = org.json.XML.toJSONObject(xml).getJSONObject("connectionAdapter"); // get the class name from the json String className = jsonConnectionAdapter.getString("class"); // get the class Class classClass = Class.forName(className); // check the class extends com.rapid.data.ConnectionAdapter if (!Classes.extendsClass(classClass, com.rapid.data.ConnectionAdapter.class)) throw new Exception(classClass.getCanonicalName() + " must extend com.rapid.data.ConnectionAdapter"); // check this class is unique if (connectionConstructors.get(className) != null) throw new Exception(className + " connection adapter already loaded."); // add to constructors hashmap referenced by type connectionConstructors.put(className, classClass.getConstructor(ServletContext.class, String.class, String.class, String.class, String.class)); // add to to our array list connectionAdapters.add(jsonConnectionAdapter); // increment the count adapterCount++; } // sort the connection adapters according to their order property Collections.sort(connectionAdapters, new Comparator<JSONObject>() { @Override public int compare(JSONObject o1, JSONObject o2) { try { return o1.getInt("order") - 
o2.getInt("order"); } catch (JSONException e) { return 999; } } }); // create a JSON Array object which will hold json for all of the available security adapters JSONArray jsonConnectionAdapters = new JSONArray(); // loop the sorted connection adapters and add to the json array for (JSONObject jsonConnectionAdapter : connectionAdapters) jsonConnectionAdapters.put(jsonConnectionAdapter); // put the jsonControls in a context attribute (this is available via the getJsonActions method in RapidHttpServlet) servletContext.setAttribute("jsonConnectionAdapters", jsonConnectionAdapters); // put the constructors hashmapin a context attribute (this is available via the getContructor method in RapidHttpServlet) servletContext.setAttribute("securityConstructors", connectionConstructors); _logger.info(adapterCount + " connection adapters loaded in .connectionAdapter.xml files"); return adapterCount; } // loop all of the .securityAdapter.xml files and check the injectable classes, so we can re-initialise JAXB context to be able to serialise them, and cache their constructors for speedy initialisation public static int loadSecurityAdapters(ServletContext servletContext) throws Exception { int adapterCount = 0; // retain our class constructors in a hashtable - this speeds up initialisation HashMap<String,Constructor> securityConstructors = new HashMap<>(); // create a JSON Array object which will hold json for all of the available security adapters JSONArray jsonSecurityAdapters = new JSONArray(); // get the directory in which the control xml files are stored File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/security/"); // create a filter for finding .securityadapter.xml files FilenameFilter xmlFilenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".securityadapter.xml"); } }; // create a schema object for the xsd Schema schema = _schemaFactory.newSchema(new 
File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/securityAdapter.xsd")); // create a validator Validator validator = schema.newValidator(); // loop the xml files in the folder for (File xmlFile : dir.listFiles(xmlFilenameFilter)) { // read the xml into a string String xml = Strings.getString(xmlFile); // validate the control xml file against the schema validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // convert the string into JSON JSONObject jsonSecurityAdapter = org.json.XML.toJSONObject(xml).getJSONObject("securityAdapter"); // get the type from the json String type = jsonSecurityAdapter.getString("type"); // get the class name from the json String className = jsonSecurityAdapter.getString("class"); // get the class Class classClass = Class.forName(className); // check the class extends com.rapid.security.SecurityAdapter if (!Classes.extendsClass(classClass, com.rapid.security.SecurityAdapter.class)) throw new Exception(type + " security adapter class " + classClass.getCanonicalName() + " must extend com.rapid.security.SecurityAdapter"); // check this type is unique if (securityConstructors.get(type) != null) throw new Exception(type + " security adapter already loaded. 
Type names must be unique."); // add to constructors hashmap referenced by type securityConstructors.put(type, classClass.getConstructor(ServletContext.class, Application.class)); // add to our collection jsonSecurityAdapters.put(jsonSecurityAdapter); // increment the count adapterCount++; } // put the jsonControls in a context attribute (this is available via the getJsonActions method in RapidHttpServlet) servletContext.setAttribute("jsonSecurityAdapters", jsonSecurityAdapters); // put the constructors hashmapin a context attribute (this is available via the getContructor method in RapidHttpServlet) servletContext.setAttribute("securityConstructors", securityConstructors); _logger.info(adapterCount + " security adapters loaded in .securityAdapter.xml files"); return adapterCount; } // loop all of the .securityAdapter.xml files and check the injectable classes, so we can re-initialise JAXB context to be able to serialise them, and cache their constructors for speedy initialisation public static int loadFormAdapters(ServletContext servletContext) throws Exception { int adapterCount = 0; // retain our form adapter class constructors in a hashtable - this speeds up initialisation HashMap<String,Constructor> formConstructors = new HashMap<>(); // retain our payment class constructors in a hashtable - this speeds up initialisation HashMap<String,Constructor> paymentConstructors = new HashMap<>(); // create a JSON Array object which will hold json for all of the available security adapters JSONArray jsonAdapters = new JSONArray(); // get the directory in which the control xml files are stored File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/forms/"); // create a filter for finding .formadapter.xml files FilenameFilter xmlFilenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".formadapter.xml"); } }; // create a schema object for the xsd Schema schema = 
_schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/formAdapter.xsd")); // create a validator Validator validator = schema.newValidator(); // loop the xml files in the folder for (File xmlFile : dir.listFiles(xmlFilenameFilter)) { // read the xml into a string String xml = Strings.getString(xmlFile); // validate the control xml file against the schema validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // convert the string into JSON JSONObject jsonFormAdapter = org.json.XML.toJSONObject(xml).getJSONObject("formAdapter"); // get the type from the json String type = jsonFormAdapter.getString("type"); // get the class name from the json String className = jsonFormAdapter.getString("class"); // get the class Class classClass = Class.forName(className); // check the class extends com.rapid.forms.FormAdapter if (!Classes.extendsClass(classClass, com.rapid.forms.FormAdapter.class)) throw new Exception(type + " form adapter class " + classClass.getCanonicalName() + " must extend com.rapid.forms.FormsAdapter"); // check this type is unique if (formConstructors.get(type) != null) throw new Exception(type + " form adapter already loaded. 
Type names must be unique."); // add to constructors hashmap referenced by type formConstructors.put(type, classClass.getConstructor(ServletContext.class, Application.class, String.class)); // look for a paymentGateway class className = jsonFormAdapter.optString("paymentClass", null); // if a payment class was provided and we don't yet have a constructor for this payment class if (className != null && paymentConstructors.get(className) == null) { // get the payment class classClass = Class.forName(className); // check the class implements com.rapid.forms.PaymentGateway if (!Classes.extendsClass(classClass, com.rapid.forms.PaymentGateway.class)) throw new Exception(type + " form adapter paymentClass " + classClass.getCanonicalName() + " must extend com.rapid.forms.PaymentGateway"); // add to constructors hashmap referenced by type paymentConstructors.put(className, classClass.getConstructor(ServletContext.class, Application.class)); } // add to our collection jsonAdapters.put(jsonFormAdapter); // increment the count adapterCount++; } // put the jsonControls in a context attribute (this is available via the getJsonActions method in RapidHttpServlet) servletContext.setAttribute("jsonFormAdapters", jsonAdapters); // put the constructors hashmap in a context attribute (this is available via the getContructor method in RapidHttpServlet) servletContext.setAttribute("formConstructors", formConstructors); // put the constructors hashmap in a context attribute (this is available via the getContructor method in RapidHttpServlet) servletContext.setAttribute("paymentConstructors", paymentConstructors); // log _logger.info(adapterCount + " form adapters loaded in .formAdapter.xml files"); return adapterCount; } // loop all of the .action.xml files and check the injectable classes, so we can re-initialise JAXB context to be able to serialise them, and cache their constructors for speedy initialisation public static int loadActions(ServletContext servletContext) throws Exception { 
// assume no actions int actionCount = 0; // create a list of json actions which we will sort later List<JSONObject> jsonActions = new ArrayList<>(); // retain our class constructors in a hashtable - this speeds up initialisation HashMap<String,Constructor> actionConstructors = new HashMap<>(); // build a collection of classes so we can re-initilise the JAXB context to recognise our injectable classes ArrayList<Action> actions = new ArrayList<>(); // get the directory in which the control xml files are stored File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/actions/"); // create a filter for finding .control.xml files FilenameFilter xmlFilenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".action.xml"); } }; // create a schema object for the xsd Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/action.xsd")); // create a validator Validator validator = schema.newValidator(); // loop the xml files in the folder for (File xmlFile : dir.listFiles(xmlFilenameFilter)) { // get a scanner to read the file Scanner fileScanner = new Scanner(xmlFile).useDelimiter("\\A"); // read the xml into a string String xml = fileScanner.next(); // close the scanner (and file) fileScanner.close(); // validate the control xml file against the schema validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // convert the string into JSON JSONObject jsonActionCollection = org.json.XML.toJSONObject(xml).getJSONObject("actions"); JSONObject jsonAction; int index = 0; int count = 0; // the JSON library will add a single key of there is a single class, otherwise an array if (jsonActionCollection.optJSONArray("action") == null) { jsonAction = jsonActionCollection.getJSONObject("action"); } else { jsonAction = jsonActionCollection.getJSONArray("action").getJSONObject(index); count = 
jsonActionCollection.getJSONArray("action").length(); } do { // check this type does not already exist for (int i = 0; i < jsonActions.size(); i++) { if (jsonAction.getString("type").equals(jsonActions.get(i).getString("type"))) throw new Exception(" action type is loaded already. Type names must be unique"); } // add the jsonControl to our array jsonActions.add(jsonAction); // get the named type from the json String type = jsonAction.getString("type"); // get the class name from the json String className = jsonAction.getString("class"); // get the class Class classClass = Class.forName(className); // check the class extends com.rapid.Action if (!Classes.extendsClass(classClass, com.rapid.core.Action.class)) throw new Exception(type + " action class " + classClass.getCanonicalName() + " must extend com.rapid.core.Action."); // check this type is unique if (actionConstructors.get(type) != null) throw new Exception(type + " action already loaded. Type names must be unique."); // add to constructors hashmap referenced by type actionConstructors.put(type, classClass.getConstructor(RapidHttpServlet.class, JSONObject.class)); // add to our jaxb classes collection _jaxbClasses.add(classClass); // inc the control count actionCount ++; // inc the count of controls in this file index++; // get the next one if (index < count) jsonAction = jsonActionCollection.getJSONArray("control").getJSONObject(index); } while (index < count); } // sort the list of actions by name Collections.sort(jsonActions, new Comparator<JSONObject>() { @Override public int compare(JSONObject c1, JSONObject c2) { try { return Comparators.AsciiCompare(c1.getString("name"), c2.getString("name"), false); } catch (JSONException e) { return 0; } } }); // create a JSON Array object which will hold json for all of the available controls JSONArray jsonArrayActions = new JSONArray(jsonActions); // put the jsonControls in a context attribute (this is available via the getJsonActions method in RapidHttpServlet) 
servletContext.setAttribute("jsonActions", jsonArrayActions); // put the constructors hashmapin a context attribute (this is available via the getContructor method in RapidHttpServlet) servletContext.setAttribute("actionConstructors", actionConstructors); _logger.info(actionCount + " actions loaded in .action.xml files"); return actionCount; } // here we loop all of the control.xml files and instantiate the json class object/functions and cache them in the servletContext public static int loadControls(ServletContext servletContext) throws Exception { // assume no controls int controlCount = 0; // create a list for our controls List<JSONObject> jsonControls = new ArrayList<>(); // get the directory in which the control xml files are stored File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/controls/"); // create a filter for finding .control.xml files FilenameFilter xmlFilenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".control.xml"); } }; // create a schema object for the xsd Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/control.xsd")); // create a validator Validator validator = schema.newValidator(); // loop the xml files in the folder for (File xmlFile : dir.listFiles(xmlFilenameFilter)) { // get a scanner to read the file Scanner fileScanner = new Scanner(xmlFile).useDelimiter("\\A"); // read the xml into a string String xml = fileScanner.next(); // close the scanner (and file) fileScanner.close(); // validate the control xml file against the schema validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // convert the string into JSON JSONObject jsonControlCollection = org.json.XML.toJSONObject(xml).getJSONObject("controls"); JSONObject jsonControl; int index = 0; int count = 0; if (jsonControlCollection.optJSONArray("control") == null) { jsonControl = 
jsonControlCollection.getJSONObject("control"); } else { jsonControl = jsonControlCollection.getJSONArray("control").getJSONObject(index); count = jsonControlCollection.getJSONArray("control").length(); } do { // check this type does not already exist for (int i = 0; i < jsonControls.size(); i++) { if (jsonControl.getString("type").equals(jsonControls.get(i).getString("type"))) throw new Exception(" control type is loaded already. Type names must be unique"); } // add the jsonControl to our array jsonControls.add(jsonControl); // inc the control count controlCount ++; // inc the count of controls in this file index++; // get the next one if (index < count) jsonControl = jsonControlCollection.getJSONArray("control").getJSONObject(index); } while (index < count); } // sort the list of controls by name Collections.sort(jsonControls, new Comparator<JSONObject>() { @Override public int compare(JSONObject c1, JSONObject c2) { try { return Comparators.AsciiCompare(c1.getString("name"), c2.getString("name"), false); } catch (JSONException e) { return 0; } } }); // create a JSON Array object which will hold json for all of the available controls JSONArray jsonArrayControls = new JSONArray(jsonControls); // put the jsonControls in a context attribute (this is available via the getJsonControls method in RapidHttpServlet) servletContext.setAttribute("jsonControls", jsonArrayControls); _logger.info(controlCount + " controls loaded in .control.xml files"); return controlCount; } // here we loop all of the theme.xml files and instantiate the json class object/functions and cache them in the servletContext public static int loadThemes(ServletContext servletContext) throws Exception { // assume no themes int themeCount = 0; // create a list for our themes List<Theme> themes = new ArrayList<>(); // get the directory in which the control xml files are stored File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/themes/"); // create a filter for finding .control.xml files 
FilenameFilter xmlFilenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".theme.xml"); } }; // create a schema object for the xsd Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/theme.xsd")); // create a validator Validator validator = schema.newValidator(); // loop the xml files in the folder for (File xmlFile : dir.listFiles(xmlFilenameFilter)) { // get a scanner to read the file Scanner fileScanner = new Scanner(xmlFile).useDelimiter("\\A"); // read the xml into a string String xml = fileScanner.next(); // close the scanner (and file) fileScanner.close(); // validate the control xml file against the schema validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // create a theme object from the xml Theme theme = new Theme(xml); // add it to our collection themes.add(theme); // inc the template count themeCount ++; } // sort the list of templates by name Collections.sort(themes, new Comparator<Theme>() { @Override public int compare(Theme t1, Theme t2) { return Comparators.AsciiCompare(t1.getName(), t2.getName(), false); } }); // put the jsonControls in a context attribute (this is available via the getJsonControls method in RapidHttpServlet) servletContext.setAttribute("themes", themes); _logger.info(themeCount + " themes loaded in .theme.xml files"); return themeCount; } // Here we loop all of the folders under "applications" looking for a application.xml file, copying to the latest version if found before loading the versions public static int loadApplications(ServletContext servletContext) throws JAXBException, JSONException, InstantiationException, IllegalAccessException, ClassNotFoundException, IllegalArgumentException, SecurityException, InvocationTargetException, NoSuchMethodException, IOException, ParserConfigurationException, SAXException, TransformerFactoryConfigurationError, 
TransformerException, RapidLoadingException, XPathExpressionException { // get any existing applications Applications applications = (Applications) servletContext.getAttribute("applications"); // check we got some if (applications != null) { // log _logger.info("Closing applications"); // loop the application ids for (String appId : applications.getIds()) { // loop the versions for (String version : applications.getVersions(appId).keySet()) { // get the version Application application = applications.get(appId, version); // close it application.close(servletContext); } } } // log _logger.info("Loading applications"); // assume no apps to ignore List<String> ignoreApps = new ArrayList<>(); // find any applications to ignore String ignoreAppsString = servletContext.getInitParameter("ignoreApps"); // if we got any if (ignoreAppsString != null && ignoreAppsString.trim().length() > 0) { // log _logger.info("Ignoring applications " + ignoreAppsString); // split them String[] ignoreAppsArray = ignoreAppsString.split(","); // loop, trim, and add for (String ignoreApp : ignoreAppsArray) ignoreApps.add(ignoreApp.trim().toLowerCase()); } // assume not apps to load List<String> loadApps = new ArrayList<>(); // get apps file File appsFile = new File(servletContext.getRealPath("/") + "/WEB-INF/loadapps.json"); // if it exists if (appsFile.exists()) { // read the load apps file String loadAppsString = Strings.getString(appsFile); // read it JSONArray jsonApps = new JSONArray(loadAppsString); // ignore it if it has no entries if (jsonApps.length() > 0) { // loop it for (int i = 0; i < jsonApps.length(); i++) { // add to array loadApps.add(jsonApps.getString(i).toLowerCase()); } // add rapid if not there already if (!loadApps.contains("rapid")) loadApps.add("rapid"); // log _logger.info("Loading only applications " + loadApps); } } // make a new set of applications applications = new Applications(); // the application root folder File applicationFolderRoot = new 
File(servletContext.getRealPath("/") + "/WEB-INF/applications/"); // loop the children of the application folder for (File applicationFolder : applicationFolderRoot.listFiles()) { // get the app folder name into a string String appFolderName = applicationFolder.getName().toLowerCase(); // if this child file is a directory and not in our list of apps to ignore, or present in apps to load if (applicationFolder.isDirectory() && (loadApps.size() == 0 && !ignoreApps.contains(appFolderName) || (loadApps.size() > 0 && loadApps.contains(appFolderName)))) { // get the list of files in this folder - should be all version folders File[] applicationFolders = applicationFolder.listFiles(); // assume we didn't need to version boolean versionCreated = false; // if we got some if (applicationFolders != null) { try { // look for an application file in the root of the application folder File applicationFile = new File(applicationFolder.getAbsoluteFile() + "/application.xml"); // set a version for this app (just in case it doesn't have one) String version = "1"; // if it exists here, it's in the wrong (non-versioned) place! 
if (applicationFile.exists()) { // create a file for the new version folder File versionFolder = new File(applicationFolder + "/" + version); // keep appending the version if the folder already exists while (versionFolder.exists()) { // append .1 to the version 1, 1.1, 1.1.1, etc version += ".1"; versionFolder = new File(applicationFolder + "/" + version); } // make the dir versionFolder.mkdir(); _logger.info(versionFolder + " created"); // copy in all files and pages folder for (File file : applicationFolders) { // copy all files and the pages folder if (!file.isDirectory() || (file.isDirectory() && "pages".equals(file.getName()))) { // make a desintation file File destFile = new File(versionFolder + "/" + file.getName()); // this is not a version folder itself, copy it to the new version folder Files.copyFolder(file, destFile); // delete the file or folder Files.deleteRecurring(file); // log _logger.info(file + " moved to " + destFile); } } // record that we created a version versionCreated = true; } // application.xml non-versioned check try { // get the version folders File[] versionFolders = applicationFolder.listFiles(); // get a marshaller Marshaller marshaller = RapidHttpServlet.getMarshaller(); // loop them for (File versionFolder : versionFolders) { // check is folder if (versionFolder.isDirectory()) { // look for an application file in the version folder applicationFile = new File(versionFolder + "/application.xml"); // if it exists if (applicationFile.exists()) { // placeholder for the application we're going to version up or just load Application application = null; // if we had to create a version for it if (versionCreated) { // load without resources application = Application.load(servletContext, applicationFile, false); // set the new version application.setVersion(version); // re-initialise it without resources (for the security adapter) application.initialise(servletContext, false); // marshal the updated application object to it's file 
FileOutputStream fos = new FileOutputStream(applicationFile); marshaller.marshal(application, fos); fos.close(); // get a dir for the pages File pageDir = new File(versionFolder + "/pages"); // check it exists if (pageDir.exists()) { // loop the pages files for (File pageFile : pageDir.listFiles()) { // read the contents of the file String pageContent = Strings.getString(pageFile); // replace all old file references pageContent = pageContent .replace("/" + application.getId() + "/", "/" + application.getId() + "/" + application.getVersion() + "/") .replace("~?a=" + application.getId() + "&amp;", "~?a=" + application.getId() + "&amp;" + application.getVersion() + "&amp;"); // create a file writer FileWriter fs = new FileWriter(pageFile); // save the changes fs.write(pageContent); // close the writer fs.close(); _logger.info(pageFile + " updated with new references"); } } // make a dir for it's web resources File webDir = new File(application.getWebFolder(servletContext)); webDir.mkdir(); _logger.info(webDir + " created"); // loop all the files in the parent for (File file : webDir.getParentFile().listFiles()) { // check not dir if (!file.isDirectory()) { // create a destination file for the new location File destFile = new File(webDir + "/" + file.getName()); // copy it to the new destination Files.copyFile(file, destFile); // delete the file or folder file.delete(); _logger.info(file + " moved to " + destFile); } } } // (re)load the application application = Application.load(servletContext, applicationFile); if(_monitor!=null && _monitor.isAlive(servletContext) && _monitor.isLoggingExceptions()) { long versionFolderSize = Files.getSize(versionFolder); File backupFolder = new File(versionFolder.getAbsoluteFile()+"/_backups"); long versionBackupFolderSize = Files.getSize(backupFolder); _monitor.createEntry(servletContext, application.getName(), application.getVersion(), "loadApp", versionFolderSize-versionBackupFolderSize, versionFolderSize); } // put it in our 
collection applications.put(application); } } // folder check } // version folder loop } catch (Exception ex) { // log the exception _logger.error("Error loading app " + applicationFile, ex); } // version load catch } catch (Exception ex) { // log it _logger.error("Error creating version folder for app " + applicationFolder, ex); } // version folder creation catch } // application folders check } // application folder check } // application folder loop // store them in the context servletContext.setAttribute("applications", applications); _logger.info(applications.size() + " applications loaded"); return applications.size(); } // Here we loop all of the folders under "workflows" looking for .workflow.xml files public static int loadWorkflows(ServletContext servletContext) throws JAXBException, JSONException, InstantiationException, IllegalAccessException, ClassNotFoundException, IllegalArgumentException, SecurityException, InvocationTargetException, NoSuchMethodException, IOException, ParserConfigurationException, SAXException, TransformerFactoryConfigurationError, TransformerException, RapidLoadingException, XPathExpressionException { // get any existing workflows Workflows workflows = (Workflows) servletContext.getAttribute("workflows"); // check we got some if (workflows != null) { // log _logger.info("Closing workflows"); // loop the application ids for (String workflowId : workflows.getIds()) { // get the workflow Workflow workflow = workflows.get(workflowId); // close it workflow.close(servletContext); } } _logger.info("Loading workflows"); // make a new set of workflows workflows = new Workflows(); // get their folder File folderRoot = new File(servletContext.getRealPath("/") + "/WEB-INF/workflows/"); // if there is one if (folderRoot.isDirectory()) { // create a filter for finding .control.xml files FilenameFilter xmlFilenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return 
name.toLowerCase().endsWith(".workflow.xml"); } }; // get the list of files in this folder - should be all workflows File[] files = folderRoot.listFiles(xmlFilenameFilter); // if we got some if (files != null) { // get a marshaller Marshaller marshaller = RapidHttpServlet.getMarshaller(); // loop the files for (File file : files) { // load this workflow Workflow workflow = Workflow.load(servletContext, file); // add to collection workflows.put(workflow.getId(), workflow); } } // workflows files check } // workflows folder check // store them in the context servletContext.setAttribute("workflows", workflows); _logger.info(workflows.size() + " workflows loaded"); return workflows.size(); } public static int loadProcesses(ServletContext servletContext) throws Exception { // get any existing processes List<Process> processes = (List<Process>) servletContext.getAttribute("processes"); // check we got some if (processes != null) { // log _logger.info("Stopping processes"); // loop the application ids for (Process process : processes) { // interrupt the process (which will stop it) process.interrupt(); } } // log _logger.info("Loading processes"); // make a new set of applications processes = new ArrayList<>(); // get the directory in which the process xml files are stored File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/processes/"); // create a filter for finding .control.xml files FilenameFilter xmlFilenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".process.xml"); } }; // create a schema object for the xsd Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/process.xsd")); // create a validator Validator validator = schema.newValidator(); // loop the xml files in the folder for (File xmlFile : dir.listFiles(xmlFilenameFilter)) { // get a scanner to read the file Scanner fileScanner = new 
Scanner(xmlFile).useDelimiter("\\A"); // read the xml into a string String xml = fileScanner.next(); // close the scanner (and file) fileScanner.close(); // validate the control xml file against the schema validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // convert the xml into JSON JSONObject jsonProcess = org.json.XML.toJSONObject(xml).getJSONObject("process"); // get the name from the json String name = jsonProcess.getString("name"); // get the class name from the json String className = jsonProcess.getString("class"); // get the class Class classClass = Class.forName(className); // check the class extends com.rapid.security.SecurityAdapter if (!Classes.extendsClass(classClass, com.rapid.core.Process.class)) throw new Exception(name + " process class " + classClass.getCanonicalName() + " must extend com.rapid.core.Process"); // get a constructor Constructor constructor = classClass.getConstructor(ServletContext.class, JSONObject.class); // create a process object from the xml Process process = (Process) constructor.newInstance(servletContext, jsonProcess); // start it process.start(); // add it to our collection processes.add(process); } // store them in the context servletContext.setAttribute("processes", processes); // log that we've loaded them _logger.info(processes.size() + " process" + (processes.size() == 1 ? 
"" : "es") + " loaded"); // return the size return processes.size(); } @Override public void contextInitialized(ServletContextEvent event) { // request windows line breaks to make the files easier to edit (in particular the marshalled .xml files) System.setProperty("line.separator", "\r\n"); // this fixes Illegal reflective access by com.sun.xml.bind.v2.runtime.reflect.opt.Injector, see https://github.com/javaee/jaxb-v2/issues/1197 System.setProperty("com.sun.xml.bind.v2.bytecode.ClassTailor.noOptimize","true"); // get a reference to the servlet context ServletContext servletContext = event.getServletContext(); // set up logging try { // get a logger _logger = LogManager.getLogger(RapidHttpServlet.class); // set the logger and store in servletConext servletContext.setAttribute("logger", _logger); // log! _logger.info("Logger created"); } catch (Exception e) { System.err.println("Error initilising logging : " + e.getMessage()); e.printStackTrace(); } try { // assume no encryptionProvider EncryptionProvider encryptionProvider = null; // look for the rapid.txt file with the saved password and salt File secretsFile = new File(servletContext.getRealPath("/") + "/WEB-INF/security/encryption.txt"); // if it exists if (secretsFile.exists()) { // get a file reader BufferedReader br = new BufferedReader(new FileReader(secretsFile)); // read the first line String className = br.readLine().trim(); // close the reader br.close(); // if the class name does not start with # if (!className.startsWith("#")) { // get the class Class classClass = Class.forName(className); // get the interfaces Class[] classInterfaces = classClass.getInterfaces(); // assume it doesn't have the interface we want boolean gotInterface = false; // check we got some if (classInterfaces != null) { for (Class classInterface : classInterfaces) { if (com.rapid.utils.Encryption.EncryptionProvider.class.equals(classInterface)) { gotInterface = true; break; } } } // check the class extends com.rapid.Action if 
(gotInterface) { // get the constructors Constructor[] classConstructors = classClass.getDeclaredConstructors(); // check we got some if (classConstructors != null) { // assume we don't get the parameterless one we need Constructor constructor = null; // loop them for (Constructor classConstructor : classConstructors) { // check parameters if (classConstructor.getParameterTypes().length == 0) { constructor = classConstructor; break; } } // check we got what we want if (constructor == null) { _logger.error("Encryption not initialised : Class in security.txt class must have a parameterless constructor"); } else { // construct the class encryptionProvider = (EncryptionProvider) constructor.newInstance(); // log _logger.info("Encryption initialised"); } } } else { _logger.error("Encryption not initialised : Class in security.txt class must extend com.rapid.utils.Encryption.EncryptionProvider"); } } } else { _logger.info("Encyption not initialised"); } _monitor.setUpMonitor(servletContext); // create the encypted xml adapter (if the file above is not found there no encryption will occur) RapidHttpServlet.setEncryptedXmlAdapter(new EncryptedXmlAdapter(encryptionProvider)); // store away the encryption provider RapidHttpServlet.setEncryptionProvider(encryptionProvider); // initialise the schema factory (we'll reuse it in the various loaders) _schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); // initialise the list of classes we're going to want in the JAXB context (the loaders will start adding to it) _jaxbClasses = new ArrayList<>(); // load the logins first _logger.info("Loading logins"); // load the database drivers first loadLogins(servletContext); _logger.info("Loading database drivers"); // load the database drivers loadDatabaseDrivers(servletContext); _logger.info("Loading connection adapters"); // load the connection adapters loadConnectionAdapters(servletContext); _logger.info("Loading security adapters"); // load the security 
adapters loadSecurityAdapters(servletContext); _logger.info("Loading form adapters"); // load the form adapters loadFormAdapters(servletContext); _logger.info("Loading actions"); // load the actions loadActions(servletContext); _logger.info("Loading themes"); // load themes loadThemes(servletContext); _logger.info("Loading controls"); // load the controls loadControls(servletContext); // add some classes manually _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.class); _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.NameRestriction.class); _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.MinOccursRestriction.class); _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.MaxOccursRestriction.class); _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.MaxLengthRestriction.class); _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.MinLengthRestriction.class); _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.EnumerationRestriction.class); _jaxbClasses.add(com.rapid.soa.Webservice.class); _jaxbClasses.add(com.rapid.soa.SQLWebservice.class); _jaxbClasses.add(com.rapid.soa.JavaWebservice.class); _jaxbClasses.add(com.rapid.core.Validation.class); _jaxbClasses.add(com.rapid.core.Action.class); _jaxbClasses.add(com.rapid.core.Event.class); _jaxbClasses.add(com.rapid.core.Style.class); _jaxbClasses.add(com.rapid.core.Control.class); _jaxbClasses.add(com.rapid.core.Page.class); _jaxbClasses.add(com.rapid.core.Application.class); _jaxbClasses.add(com.rapid.core.Applications.class); _jaxbClasses.add(com.rapid.core.Workflow.class); _jaxbClasses.add(com.rapid.core.Workflows.class); _jaxbClasses.add(com.rapid.core.Device.class); _jaxbClasses.add(com.rapid.core.Device.Devices.class); _jaxbClasses.add(com.rapid.core.Email.class); // convert arraylist to array Class[] classes = _jaxbClasses.toArray(new Class[_jaxbClasses.size()]); // re-init the JAXB context to include our injectable classes JAXBContext jaxbContext = JAXBContext.newInstance(classes); // this logs the 
JAXB classes _logger.trace("JAXB content : " + jaxbContext.toString()); // store the jaxb context in RapidHttpServlet RapidHttpServlet.setJAXBContext(jaxbContext); try { // get the extras.min.js file - this is a common cause of bugs on upgrades and will be rebuilt by the first reloaded app File extrasMin = new File(servletContext.getRealPath("/") + "/scripts_min/extras.min.js"); // delete the extras.min.js file if present if (extrasMin.exists()) extrasMin.delete(); } catch (Exception ex) { // just log _logger.info("Failed to delete extras.min.js", ex); } // load the devices Devices.load(servletContext); // load the email settings Email.load(servletContext); // load the applications! loadApplications(servletContext); // load the workflows! loadWorkflows(servletContext); // load the processes loadProcesses(servletContext); // add some useful global objects String localDateFormat = servletContext.getInitParameter("localDateFormat"); if (localDateFormat == null) localDateFormat = "dd/MM/yyyy"; servletContext.setAttribute("localDateFormat", localDateFormat); String localDateTimeFormat = servletContext.getInitParameter("localDateTimeFormat"); if (localDateTimeFormat == null) localDateTimeFormat = "dd/MM/yyyy HH:mm a"; servletContext.setAttribute("localDateTimeFormat", localDateTimeFormat); boolean actionCache = Boolean.parseBoolean(servletContext.getInitParameter("actionCache")); if (actionCache) servletContext.setAttribute("actionCache", new ActionCache(servletContext)); // allow calling to https without checking certs (for now) SSLContext sc = SSLContext.getInstance("SSL"); TrustManager[] trustAllCerts = new TrustManager[]{ new Https.TrustAllCerts() }; sc.init(null, trustAllCerts, new java.security.SecureRandom()); HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory()); } catch (Exception ex) { // log error in detail _logger.error("Error initialising Rapid : " + ex.getMessage(), ex); } } @Override public void contextDestroyed(ServletContextEvent event){ 
// log _logger.info("Shutting down..."); // get the servletContext ServletContext servletContext = event.getServletContext(); // get all processes List<Process> processes = (List<Process>) servletContext.getAttribute("processes"); // if we got some if (processes != null) { // loop them for (Process process : processes) { // log _logger.info("Stopping process ." + process.getName() + "..."); // interrupt the process (which stops it) process.interrupt(); } } // get all of the applications Applications applications = (Applications) servletContext.getAttribute("applications"); // if we got some if (applications != null) { // loop the application ids for (String id : applications.getIds()) { // get the application Versions versions = applications.getVersions(id); // loop the versions of each app for (String version : versions.keySet()) { // log _logger.info("Closing application " + id + "/" + version + "..."); // get the application Application application = applications.get(id, version); // have it close any sensitive resources application.close(servletContext); } } } // sleep for 2 seconds to allow any database connection cleanup to complete try { Thread.sleep(2000); } catch (Exception ex) {} // This manually deregisters JDBC drivers, which prevents Tomcat from complaining about memory leaks from this class Enumeration<Driver> drivers = DriverManager.getDrivers(); while (drivers.hasMoreElements()) { Driver driver = drivers.nextElement(); try { DriverManager.deregisterDriver(driver); _logger.info(String.format("Deregistering jdbc driver: %s", driver)); } catch (SQLException e) { _logger.error(String.format("Error deregistering driver %s", driver), e); } } // Thanks to http://stackoverflow.com/questions/11872316/tomcat-guice-jdbc-memory-leak Set<Thread> threadSet = Thread.getAllStackTraces().keySet(); Thread[] threadArray = threadSet.toArray(new Thread[threadSet.size()]); for (Thread t:threadArray) { if (t.getName().contains("Abandoned connection cleanup thread")) { 
synchronized (t) { try { _logger.info("Forcing stop of Abandoned connection cleanup thread"); t.stop(); //don't complain, it works } catch (Exception ex) { _logger.info("Error forcing stop of Abandoned connection cleanup thread",ex); } } } } // sleep for 1 second to allow any database connection cleanup to complete try { Thread.sleep(1000); } catch (Exception ex) {} // last log _logger.info("Logger shutdown"); // shutdown logger if (_logger != null) LogManager.shutdown(); } }
src/com/rapid/server/RapidServletContextListener.java
/* Copyright (C) 2019 - Gareth Edwards / Rapid Information Systems [email protected] This file is part of the Rapid Application Platform Rapid is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. The terms require you to include the original copyright, and the license notice in all redistributions. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License in a file named "COPYING". If not, see <http://www.gnu.org/licenses/>. */ package com.rapid.server; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileOutputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.FilenameFilter; import java.io.IOException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.sql.Driver; import java.sql.DriverManager; import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Scanner; import java.util.Set; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import javax.servlet.ServletContext; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; import javax.xml.XMLConstants; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import 
javax.xml.transform.TransformerFactoryConfigurationError; import javax.xml.transform.stream.StreamSource; import javax.xml.validation.Schema; import javax.xml.validation.SchemaFactory; import javax.xml.validation.Validator; import javax.xml.xpath.XPathExpressionException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.xml.sax.SAXException; import com.rapid.core.Action; import com.rapid.core.Application; import com.rapid.core.Application.RapidLoadingException; import com.rapid.core.Applications; import com.rapid.core.Applications.Versions; import com.rapid.core.Device.Devices; import com.rapid.core.Email; import com.rapid.core.Process; import com.rapid.core.Theme; import com.rapid.core.Workflow; import com.rapid.core.Workflows; import com.rapid.utils.Classes; import com.rapid.utils.Comparators; import com.rapid.utils.Encryption.EncryptionProvider; import com.rapid.utils.Files; import com.rapid.utils.Https; import com.rapid.utils.JAXB.EncryptedXmlAdapter; import com.rapid.utils.Strings; public class RapidServletContextListener implements ServletContextListener { // the logger which we will initialise private static Logger _logger; // the schema factory that we will load the actions and controls schemas into private static SchemaFactory _schemaFactory; // all of the classes we are going to put into our jaxb context private static ArrayList<Class> _jaxbClasses; // enterprise monitor protected static Monitor _monitor = new Monitor(); // public static methods public static void logFileNames(File dir, String rootPath) { for (File file : dir.listFiles()) { if (file.isDirectory()) { logFileNames(file, rootPath); } else { String fileName = file.toString(); _logger.info(fileName.substring(rootPath.length())); } } } public static int loadLogins(ServletContext servletContext) throws Exception { int loginCount = 0; // get the directory in which 
the control xml files are stored File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/logins/"); // if the directory exists if (dir.exists()) { // create an array list of json objects to hold the logins ArrayList<JSONObject> logins = new ArrayList<JSONObject>(); // create a filter for finding .control.xml files FilenameFilter xmlFilenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".login.xml"); } }; // create a schema object for the xsd Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/login.xsd")); // create a validator Validator validator = schema.newValidator(); // loop the xml files in the folder for (File xmlFile : dir.listFiles(xmlFilenameFilter)) { // read the xml into a string String xml = Strings.getString(xmlFile); // validate the control xml file against the schema validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // convert the string into JSON JSONObject jsonLogin = org.json.XML.toJSONObject(xml).getJSONObject("login"); // add to array list logins.add(jsonLogin); // increment the count loginCount++; } // put the logins in a context attribute (this is available to the security adapters on initialisation) servletContext.setAttribute("jsonLogins", logins); } _logger.info(loginCount + " logins loaded from .login.xml files"); return loginCount; } public static int loadDatabaseDrivers(ServletContext servletContext) throws Exception { // create a schema object for the xsd Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/databaseDrivers.xsd")); // create a validator Validator validator = schema.newValidator(); // read the xml into a string String xml = Strings.getString(new File(servletContext.getRealPath("/") + "/WEB-INF/database/" + "/databaseDrivers.xml")); // validate the control xml file against the schema 
	// Reads WEB-INF/database/databaseDrivers.xml, validates it against databaseDrivers.xsd,
	// registers each declared JDBC driver class, and caches the driver descriptions in the context.
	// Returns the number of drivers processed (note: this includes drivers whose registration failed).
	public static int loadDatabaseDrivers(ServletContext servletContext) throws Exception {

		// create a schema object for the xsd
		Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/databaseDrivers.xsd"));
		// create a validator
		Validator validator = schema.newValidator();

		// read the xml into a string
		String xml = Strings.getString(new File(servletContext.getRealPath("/") + "/WEB-INF/database/" + "/databaseDrivers.xml"));

		// validate the driver xml file against the schema
		validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))));

		// convert the xml string into JSON
		JSONObject jsonDatabaseDriverCollection = org.json.XML.toJSONObject(xml).getJSONObject("databaseDrivers");

		// prepare the array we are going to popoulate
		JSONArray jsonDatabaseDrivers = new JSONArray();

		JSONObject jsonDatabaseDriver;
		int index = 0;
		int count = 0;

		// the XML-to-JSON conversion yields a single object when there is one driver and an array when
		// there are several, so the two shapes have to be handled separately; count stays 0 in the single case
		// and the do/while below still runs exactly once
		if (jsonDatabaseDriverCollection.optJSONArray("databaseDriver") == null) {
			jsonDatabaseDriver = jsonDatabaseDriverCollection.getJSONObject("databaseDriver");
		} else {
			jsonDatabaseDriver = jsonDatabaseDriverCollection.getJSONArray("databaseDriver").getJSONObject(index);
			count = jsonDatabaseDriverCollection.getJSONArray("databaseDriver").length();
		}

		do {

			_logger.info("Registering database driver " + jsonDatabaseDriver.getString("name") + " using " + jsonDatabaseDriver.getString("class"));

			try {
				// check this driver name does not already exist
				for (int i = 0; i < jsonDatabaseDrivers.length(); i++) {
					if (jsonDatabaseDriver.getString("name").equals(jsonDatabaseDrivers.getJSONObject(i).getString("name"))) throw new Exception(" database driver type is loaded already. Type names must be unique");
				}

				// get the class name
				String className = jsonDatabaseDriver.getString("class");
				// get the current thread class loader (this should log better if there are any issues)
				ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
				// check we got a class loader
				if (classLoader == null) {
					// register the class the old fashioned way so the DriverManager can find it
					Class.forName(className);
				} else {
					// register the class on this thread so we can catch any errors
					Class.forName(className, true, classLoader);
				}

				// add this driver's json to our array
				jsonDatabaseDrivers.put(jsonDatabaseDriver);

			} catch (Exception ex) {
				// a failed driver is logged but does not stop the remaining drivers from loading
				_logger.error("Error registering database driver : " + ex.getMessage(), ex);
			}

			// increment the count of drivers processed
			index++;

			// get the next one
			if (index < count) jsonDatabaseDriver = jsonDatabaseDriverCollection.getJSONArray("databaseDriver").getJSONObject(index);

		} while (index < count);

		// put the successfully registered drivers in a context attribute (available via RapidHttpServlet)
		servletContext.setAttribute("jsonDatabaseDrivers", jsonDatabaseDrivers);

		_logger.info(index + " database drivers loaded from databaseDrivers.xml file");

		// returns the processed count, which may exceed the number actually registered if any failed
		return index;
	}
	// Loops all of the .connectionAdapter.xml files under WEB-INF/database, validates them, caches their
	// constructors for speedy initialisation, and stores the sorted adapter descriptions in the context.
	public static int loadConnectionAdapters(ServletContext servletContext) throws Exception {

		int adapterCount = 0;

		// retain our class constructors in a hashtable - this speeds up initialisation
		HashMap<String,Constructor> connectionConstructors = new HashMap<String,Constructor>();

		// create an array list of json objects which we will sort later according to the order
		ArrayList<JSONObject> connectionAdapters = new ArrayList<JSONObject>();

		// get the directory in which the connection adapter xml files are stored
		File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/database/");

		// create a filter for finding .connectionadapter.xml files
		FilenameFilter xmlFilenameFilter = new FilenameFilter() {
			@Override
			public boolean accept(File dir, String name) {
				return name.toLowerCase().endsWith(".connectionadapter.xml");
			}
		};

		// create a schema object for the xsd
		Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/connectionAdapter.xsd"));
		// create a validator
		Validator validator = schema.newValidator();

		// loop the xml files in the folder
		for (File xmlFile : dir.listFiles(xmlFilenameFilter)) {

			// read the xml into a string
			String xml = Strings.getString(xmlFile);

			// validate the adapter xml file against the schema
			validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))));

			// convert the string into JSON
			JSONObject jsonConnectionAdapter = org.json.XML.toJSONObject(xml).getJSONObject("connectionAdapter");

			// get the class name from the json
			String className = jsonConnectionAdapter.getString("class");
			// get the class
			Class classClass = Class.forName(className);
			// check the class extends com.rapid.data.ConnectionAdapter
			if (!Classes.extendsClass(classClass, com.rapid.data.ConnectionAdapter.class)) throw new Exception(classClass.getCanonicalName() + " must extend com.rapid.data.ConnectionAdapter");
			// check this class is unique
			if (connectionConstructors.get(className) != null) throw new Exception(className + " connection adapter already loaded.");

			// add to constructors hashmap referenced by class name
			connectionConstructors.put(className, classClass.getConstructor(ServletContext.class, String.class, String.class, String.class, String.class));

			// add to to our array list
			connectionAdapters.add(jsonConnectionAdapter);

			// increment the count
			adapterCount++;

		}

		// sort the connection adapters according to their order property
		// NOTE(review): int subtraction here can overflow for extreme "order" values, and a missing
		// "order" sorts as 999 - presumably orders are small positive ints; confirm
		Collections.sort(connectionAdapters, new Comparator<JSONObject>() {
			@Override
			public int compare(JSONObject o1, JSONObject o2) {
				try {
					return o1.getInt("order") - o2.getInt("order");
				} catch (JSONException e) {
					return 999;
				}
			}
		});

		// create a JSON Array object which will hold json for all of the available connection adapters
		JSONArray jsonConnectionAdapters = new JSONArray();

		// loop the sorted connection adapters and add to the json array
		for (JSONObject jsonConnectionAdapter : connectionAdapters) jsonConnectionAdapters.put(jsonConnectionAdapter);

		// put the jsonConnectionAdapters in a context attribute (available via RapidHttpServlet)
		servletContext.setAttribute("jsonConnectionAdapters", jsonConnectionAdapters);

		// put the constructors hashmap in a context attribute
		// NOTE(review): the key "securityConstructors" looks like a copy-paste slip from loadSecurityAdapters,
		// which later stores its own map under the same key and clobbers this one - the intended key is
		// presumably "connectionConstructors"; confirm against the consumer in RapidHttpServlet before changing
		servletContext.setAttribute("securityConstructors", connectionConstructors);

		_logger.info(adapterCount + " connection adapters loaded in .connectionAdapter.xml files");

		return adapterCount;
	}
object for the xsd Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/securityAdapter.xsd")); // create a validator Validator validator = schema.newValidator(); // loop the xml files in the folder for (File xmlFile : dir.listFiles(xmlFilenameFilter)) { // read the xml into a string String xml = Strings.getString(xmlFile); // validate the control xml file against the schema validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // convert the string into JSON JSONObject jsonSecurityAdapter = org.json.XML.toJSONObject(xml).getJSONObject("securityAdapter"); // get the type from the json String type = jsonSecurityAdapter.getString("type"); // get the class name from the json String className = jsonSecurityAdapter.getString("class"); // get the class Class classClass = Class.forName(className); // check the class extends com.rapid.security.SecurityAdapter if (!Classes.extendsClass(classClass, com.rapid.security.SecurityAdapter.class)) throw new Exception(type + " security adapter class " + classClass.getCanonicalName() + " must extend com.rapid.security.SecurityAdapter"); // check this type is unique if (securityConstructors.get(type) != null) throw new Exception(type + " security adapter already loaded. 
Type names must be unique."); // add to constructors hashmap referenced by type securityConstructors.put(type, classClass.getConstructor(ServletContext.class, Application.class)); // add to our collection jsonSecurityAdapters.put(jsonSecurityAdapter); // increment the count adapterCount++; } // put the jsonControls in a context attribute (this is available via the getJsonActions method in RapidHttpServlet) servletContext.setAttribute("jsonSecurityAdapters", jsonSecurityAdapters); // put the constructors hashmapin a context attribute (this is available via the getContructor method in RapidHttpServlet) servletContext.setAttribute("securityConstructors", securityConstructors); _logger.info(adapterCount + " security adapters loaded in .securityAdapter.xml files"); return adapterCount; } // loop all of the .securityAdapter.xml files and check the injectable classes, so we can re-initialise JAXB context to be able to serialise them, and cache their constructors for speedy initialisation public static int loadFormAdapters(ServletContext servletContext) throws Exception { int adapterCount = 0; // retain our form adapter class constructors in a hashtable - this speeds up initialisation HashMap<String,Constructor> formConstructors = new HashMap<String,Constructor>(); // retain our payment class constructors in a hashtable - this speeds up initialisation HashMap<String,Constructor> paymentConstructors = new HashMap<String,Constructor>(); // create a JSON Array object which will hold json for all of the available security adapters JSONArray jsonAdapters = new JSONArray(); // get the directory in which the control xml files are stored File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/forms/"); // create a filter for finding .formadapter.xml files FilenameFilter xmlFilenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".formadapter.xml"); } }; // create a schema object for the xsd Schema 
schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/formAdapter.xsd")); // create a validator Validator validator = schema.newValidator(); // loop the xml files in the folder for (File xmlFile : dir.listFiles(xmlFilenameFilter)) { // read the xml into a string String xml = Strings.getString(xmlFile); // validate the control xml file against the schema validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // convert the string into JSON JSONObject jsonFormAdapter = org.json.XML.toJSONObject(xml).getJSONObject("formAdapter"); // get the type from the json String type = jsonFormAdapter.getString("type"); // get the class name from the json String className = jsonFormAdapter.getString("class"); // get the class Class classClass = Class.forName(className); // check the class extends com.rapid.forms.FormAdapter if (!Classes.extendsClass(classClass, com.rapid.forms.FormAdapter.class)) throw new Exception(type + " form adapter class " + classClass.getCanonicalName() + " must extend com.rapid.forms.FormsAdapter"); // check this type is unique if (formConstructors.get(type) != null) throw new Exception(type + " form adapter already loaded. 
Type names must be unique."); // add to constructors hashmap referenced by type formConstructors.put(type, classClass.getConstructor(ServletContext.class, Application.class, String.class)); // look for a paymentGateway class className = jsonFormAdapter.optString("paymentClass", null); // if a payment class was provided and we don't yet have a constructor for this payment class if (className != null && paymentConstructors.get(className) == null) { // get the payment class classClass = Class.forName(className); // check the class implements com.rapid.forms.PaymentGateway if (!Classes.extendsClass(classClass, com.rapid.forms.PaymentGateway.class)) throw new Exception(type + " form adapter paymentClass " + classClass.getCanonicalName() + " must extend com.rapid.forms.PaymentGateway"); // add to constructors hashmap referenced by type paymentConstructors.put(className, classClass.getConstructor(ServletContext.class, Application.class)); } // add to our collection jsonAdapters.put(jsonFormAdapter); // increment the count adapterCount++; } // put the jsonControls in a context attribute (this is available via the getJsonActions method in RapidHttpServlet) servletContext.setAttribute("jsonFormAdapters", jsonAdapters); // put the constructors hashmap in a context attribute (this is available via the getContructor method in RapidHttpServlet) servletContext.setAttribute("formConstructors", formConstructors); // put the constructors hashmap in a context attribute (this is available via the getContructor method in RapidHttpServlet) servletContext.setAttribute("paymentConstructors", paymentConstructors); // log _logger.info(adapterCount + " form adapters loaded in .formAdapter.xml files"); return adapterCount; } // loop all of the .action.xml files and check the injectable classes, so we can re-initialise JAXB context to be able to serialise them, and cache their constructors for speedy initialisation public static int loadActions(ServletContext servletContext) throws Exception { 
// assume no actions int actionCount = 0; // create a list of json actions which we will sort later List<JSONObject> jsonActions = new ArrayList<JSONObject>(); // retain our class constructors in a hashtable - this speeds up initialisation HashMap<String,Constructor> actionConstructors = new HashMap<String,Constructor>(); // build a collection of classes so we can re-initilise the JAXB context to recognise our injectable classes ArrayList<Action> actions = new ArrayList<Action>(); // get the directory in which the control xml files are stored File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/actions/"); // create a filter for finding .control.xml files FilenameFilter xmlFilenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".action.xml"); } }; // create a schema object for the xsd Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/action.xsd")); // create a validator Validator validator = schema.newValidator(); // loop the xml files in the folder for (File xmlFile : dir.listFiles(xmlFilenameFilter)) { // get a scanner to read the file Scanner fileScanner = new Scanner(xmlFile).useDelimiter("\\A"); // read the xml into a string String xml = fileScanner.next(); // close the scanner (and file) fileScanner.close(); // validate the control xml file against the schema validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // convert the string into JSON JSONObject jsonActionCollection = org.json.XML.toJSONObject(xml).getJSONObject("actions"); JSONObject jsonAction; int index = 0; int count = 0; // the JSON library will add a single key of there is a single class, otherwise an array if (jsonActionCollection.optJSONArray("action") == null) { jsonAction = jsonActionCollection.getJSONObject("action"); } else { jsonAction = jsonActionCollection.getJSONArray("action").getJSONObject(index); count = 
jsonActionCollection.getJSONArray("action").length(); } do { // check this type does not already exist for (int i = 0; i < jsonActions.size(); i++) { if (jsonAction.getString("type").equals(jsonActions.get(i).getString("type"))) throw new Exception(" action type is loaded already. Type names must be unique"); } // add the jsonControl to our array jsonActions.add(jsonAction); // get the named type from the json String type = jsonAction.getString("type"); // get the class name from the json String className = jsonAction.getString("class"); // get the class Class classClass = Class.forName(className); // check the class extends com.rapid.Action if (!Classes.extendsClass(classClass, com.rapid.core.Action.class)) throw new Exception(type + " action class " + classClass.getCanonicalName() + " must extend com.rapid.core.Action."); // check this type is unique if (actionConstructors.get(type) != null) throw new Exception(type + " action already loaded. Type names must be unique."); // add to constructors hashmap referenced by type actionConstructors.put(type, classClass.getConstructor(RapidHttpServlet.class, JSONObject.class)); // add to our jaxb classes collection _jaxbClasses.add(classClass); // inc the control count actionCount ++; // inc the count of controls in this file index++; // get the next one if (index < count) jsonAction = jsonActionCollection.getJSONArray("control").getJSONObject(index); } while (index < count); } // sort the list of actions by name Collections.sort(jsonActions, new Comparator<JSONObject>() { @Override public int compare(JSONObject c1, JSONObject c2) { try { return Comparators.AsciiCompare(c1.getString("name"), c2.getString("name"), false); } catch (JSONException e) { return 0; } } }); // create a JSON Array object which will hold json for all of the available controls JSONArray jsonArrayActions = new JSONArray(jsonActions); // put the jsonControls in a context attribute (this is available via the getJsonActions method in RapidHttpServlet) 
servletContext.setAttribute("jsonActions", jsonArrayActions); // put the constructors hashmapin a context attribute (this is available via the getContructor method in RapidHttpServlet) servletContext.setAttribute("actionConstructors", actionConstructors); _logger.info(actionCount + " actions loaded in .action.xml files"); return actionCount; } // here we loop all of the control.xml files and instantiate the json class object/functions and cache them in the servletContext public static int loadControls(ServletContext servletContext) throws Exception { // assume no controls int controlCount = 0; // create a list for our controls List<JSONObject> jsonControls = new ArrayList<JSONObject>(); // get the directory in which the control xml files are stored File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/controls/"); // create a filter for finding .control.xml files FilenameFilter xmlFilenameFilter = new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase().endsWith(".control.xml"); } }; // create a schema object for the xsd Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/control.xsd")); // create a validator Validator validator = schema.newValidator(); // loop the xml files in the folder for (File xmlFile : dir.listFiles(xmlFilenameFilter)) { // get a scanner to read the file Scanner fileScanner = new Scanner(xmlFile).useDelimiter("\\A"); // read the xml into a string String xml = fileScanner.next(); // close the scanner (and file) fileScanner.close(); // validate the control xml file against the schema validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8")))); // convert the string into JSON JSONObject jsonControlCollection = org.json.XML.toJSONObject(xml).getJSONObject("controls"); JSONObject jsonControl; int index = 0; int count = 0; if (jsonControlCollection.optJSONArray("control") == null) { jsonControl = 
jsonControlCollection.getJSONObject("control");
		} else {
			jsonControl = jsonControlCollection.getJSONArray("control").getJSONObject(index);
			count = jsonControlCollection.getJSONArray("control").length();
		}

		do {
			// check this type does not already exist
			for (int i = 0; i < jsonControls.size(); i++) {
				if (jsonControl.getString("type").equals(jsonControls.get(i).getString("type"))) throw new Exception(" control type is loaded already. Type names must be unique");
			}
			// add the jsonControl to our array
			jsonControls.add(jsonControl);
			// inc the control count
			controlCount ++;
			// inc the count of controls in this file
			index++;
			// get the next one
			if (index < count) jsonControl = jsonControlCollection.getJSONArray("control").getJSONObject(index);
		} while (index < count);

	}

	// sort the list of controls by name
	Collections.sort(jsonControls, new Comparator<JSONObject>() {
		@Override
		public int compare(JSONObject c1, JSONObject c2) {
			try {
				return Comparators.AsciiCompare(c1.getString("name"), c2.getString("name"), false);
			} catch (JSONException e) {
				// controls without a name keep their relative order
				return 0;
			}
		}
	});

	// create a JSON Array object which will hold json for all of the available controls
	JSONArray jsonArrayControls = new JSONArray(jsonControls);

	// put the jsonControls in a context attribute (this is available via the getJsonControls method in RapidHttpServlet)
	servletContext.setAttribute("jsonControls", jsonArrayControls);

	_logger.info(controlCount + " controls loaded in .control.xml files");

	return controlCount;

}

// here we loop all of the theme.xml files and instantiate the json class object/functions and cache them in the servletContext
public static int loadThemes(ServletContext servletContext) throws Exception {

	// assume no themes
	int themeCount = 0;

	// create a list for our themes
	List<Theme> themes = new ArrayList<Theme>();

	// get the directory in which the theme xml files are stored
	File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/themes/");

	// create a filter for finding .theme.xml files
	FilenameFilter xmlFilenameFilter = new FilenameFilter() {
		@Override
		public boolean accept(File dir, String name) {
			return name.toLowerCase().endsWith(".theme.xml");
		}
	};

	// create a schema object for the xsd
	Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/theme.xsd"));
	// create a validator
	Validator validator = schema.newValidator();

	// loop the xml files in the folder
	for (File xmlFile : dir.listFiles(xmlFilenameFilter)) {

		// get a scanner to read the file
		Scanner fileScanner = new Scanner(xmlFile).useDelimiter("\\A");
		// read the xml into a string
		String xml = fileScanner.next();
		// close the scanner (and file)
		fileScanner.close();

		// validate the theme xml file against the schema
		validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))));

		// create a theme object from the xml
		Theme theme = new Theme(xml);
		// add it to our collection
		themes.add(theme);
		// inc the theme count
		themeCount ++;

	}

	// sort the list of themes by name
	Collections.sort(themes, new Comparator<Theme>() {
		@Override
		public int compare(Theme t1, Theme t2) {
			return Comparators.AsciiCompare(t1.getName(), t2.getName(), false);
		}
	});

	// put the themes in a context attribute
	servletContext.setAttribute("themes", themes);

	_logger.info(themeCount + " themes loaded in .theme.xml files");

	return themeCount;

}

// Here we loop all of the folders under "applications" looking for a application.xml file, copying to the latest version if found before loading the versions
public static int loadApplications(ServletContext servletContext) throws JAXBException, JSONException, InstantiationException, IllegalAccessException, ClassNotFoundException, IllegalArgumentException, SecurityException, InvocationTargetException, NoSuchMethodException, IOException, ParserConfigurationException, SAXException, TransformerFactoryConfigurationError,
TransformerException, RapidLoadingException, XPathExpressionException {

	// get any existing applications
	Applications applications = (Applications) servletContext.getAttribute("applications");

	// check we got some
	if (applications != null) {
		// log
		_logger.info("Closing applications");
		// loop the application ids
		for (String appId : applications.getIds()) {
			// loop the versions
			for (String version : applications.getVersions(appId).keySet()) {
				// get the version
				Application application = applications.get(appId, version);
				// close it
				application.close(servletContext);
			}
		}
	}

	// log
	_logger.info("Loading applications");

	// assume no apps to ignore
	List<String> ignoreApps = new ArrayList<String>();

	// find any applications to ignore
	String ignoreAppsString = servletContext.getInitParameter("ignoreApps");

	// if we got any
	if (ignoreAppsString != null && ignoreAppsString.trim().length() > 0) {
		// log
		_logger.info("Ignoring applications " + ignoreAppsString);
		// split them
		String[] ignoreAppsArray = ignoreAppsString.split(",");
		// loop, trim, and add
		for (String ignoreApp : ignoreAppsArray) ignoreApps.add(ignoreApp.trim().toLowerCase());
	}

	// assume no apps to load
	List<String> loadApps = new ArrayList<String>();

	// get apps file
	File appsFile = new File(servletContext.getRealPath("/") + "/WEB-INF/loadapps.json");

	// if it exists
	if (appsFile.exists()) {
		// read the load apps file
		String loadAppsString = Strings.getString(appsFile);
		// read it
		JSONArray jsonApps = new JSONArray(loadAppsString);
		// ignore it if it has no entries
		if (jsonApps.length() > 0) {
			// loop it
			for (int i = 0; i < jsonApps.length(); i++) {
				// add to array
				loadApps.add(jsonApps.getString(i).toLowerCase());
			}
			// add rapid if not there already
			if (!loadApps.contains("rapid")) loadApps.add("rapid");
			// log
			_logger.info("Loading only applications " + loadApps);
		}
	}

	// make a new set of applications
	applications = new Applications();

	// the application root folder
	File applicationFolderRoot = new File(servletContext.getRealPath("/") + "/WEB-INF/applications/");

	// loop the children of the application folder
	for (File applicationFolder : applicationFolderRoot.listFiles()) {

		// get the app folder name into a string
		String appFolderName = applicationFolder.getName().toLowerCase();

		// if this child file is a directory and not in our list of apps to ignore, or present in apps to load
		if (applicationFolder.isDirectory() && (loadApps.size() == 0 && !ignoreApps.contains(appFolderName) || (loadApps.size() > 0 && loadApps.contains(appFolderName)))) {

			// get the list of files in this folder - should be all version folders
			File[] applicationFolders = applicationFolder.listFiles();

			// assume we didn't need to version
			boolean versionCreated = false;

			// if we got some
			if (applicationFolders != null) {

				try {

					// look for an application file in the root of the application folder
					File applicationFile = new File(applicationFolder.getAbsoluteFile() + "/application.xml");

					// set a version for this app (just in case it doesn't have one)
					String version = "1";

					// if it exists here, it's in the wrong (non-versioned) place!
					if (applicationFile.exists()) {

						// create a file for the new version folder
						File versionFolder = new File(applicationFolder + "/" + version);
						// keep appending the version if the folder already exists
						while (versionFolder.exists()) {
							// append .1 to the version 1, 1.1, 1.1.1, etc
							version += ".1";
							versionFolder = new File(applicationFolder + "/" + version);
						}
						// make the dir
						versionFolder.mkdir();
						_logger.info(versionFolder + " created");

						// copy in all files and pages folder
						for (File file : applicationFolders) {
							// copy all files and the pages folder
							if (!file.isDirectory() || (file.isDirectory() && "pages".equals(file.getName()))) {
								// make a destination file
								File destFile = new File(versionFolder + "/" + file.getName());
								// this is not a version folder itself, copy it to the new version folder
								Files.copyFolder(file, destFile);
								// delete the file or folder
								Files.deleteRecurring(file);
								// log
								_logger.info(file + " moved to " + destFile);
							}
						}

						// record that we created a version
						versionCreated = true;

					} // application.xml non-versioned check

					try {

						// get the version folders
						File[] versionFolders = applicationFolder.listFiles();
						// get a marshaller
						Marshaller marshaller = RapidHttpServlet.getMarshaller();

						// loop them
						for (File versionFolder : versionFolders) {

							// check is folder
							if (versionFolder.isDirectory()) {

								// look for an application file in the version folder
								applicationFile = new File(versionFolder + "/application.xml");

								// if it exists
								if (applicationFile.exists()) {

									// placeholder for the application we're going to version up or just load
									Application application = null;

									// if we had to create a version for it
									if (versionCreated) {

										// load without resources
										application = Application.load(servletContext, applicationFile, false);
										// set the new version
										application.setVersion(version);
										// re-initialise it without resources (for the security adapter)
										application.initialise(servletContext, false);

										// marshal the updated application object to it's file
										FileOutputStream fos = new FileOutputStream(applicationFile);
										marshaller.marshal(application, fos);
										fos.close();

										// get a dir for the pages
										File pageDir = new File(versionFolder + "/pages");
										// check it exists
										if (pageDir.exists()) {
											// loop the pages files
											for (File pageFile : pageDir.listFiles()) {
												// read the contents of the file
												String pageContent = Strings.getString(pageFile);
												// replace all old file references with versioned ones
												pageContent = pageContent
													.replace("/" + application.getId() + "/", "/" + application.getId() + "/" + application.getVersion() + "/")
													.replace("~?a=" + application.getId() + "&amp;", "~?a=" + application.getId() + "&amp;" + application.getVersion() + "&amp;");
												// create a file writer
												FileWriter fs = new FileWriter(pageFile);
												// save the changes
												fs.write(pageContent);
												// close the writer
												fs.close();
												_logger.info(pageFile + " updated with new references");
											}
										}

										// make a dir for it's web resources
										File webDir = new File(application.getWebFolder(servletContext));
										webDir.mkdir();
										_logger.info(webDir + " created");

										// loop all the files in the parent
										for (File file : webDir.getParentFile().listFiles()) {
											// check not dir
											if (!file.isDirectory()) {
												// create a destination file for the new location
												File destFile = new File(webDir + "/" + file.getName());
												// copy it to the new destination
												Files.copyFile(file, destFile);
												// delete the file or folder
												file.delete();
												_logger.info(file + " moved to " + destFile);
											}
										}

									}

									// (re)load the application
									application = Application.load(servletContext, applicationFile);

									// if the monitor is up and logging, record the folder sizes for this load
									if(_monitor!=null && _monitor.isAlive(servletContext) && _monitor.isLoggingExceptions()) {
										long versionFolderSize = Files.getSize(versionFolder);
										File backupFolder = new File(versionFolder.getAbsoluteFile()+"/_backups");
										long versionBackupFolderSize = Files.getSize(backupFolder);
										_monitor.createEntry(servletContext, application.getName(), application.getVersion(), "loadApp", versionFolderSize-versionBackupFolderSize, versionFolderSize);
									}

									// put it in our collection
									applications.put(application);

								}

							} // folder check

						} // version folder loop

					} catch (Exception ex) {

						// log the exception
						_logger.error("Error loading app " + applicationFile, ex);

					} // version load catch

				} catch (Exception ex) {

					// log it
					_logger.error("Error creating version folder for app " + applicationFolder, ex);

				} // version folder creation catch

			} // application folders check

		} // application folder check

	} // application folder loop

	// store them in the context
	servletContext.setAttribute("applications", applications);

	_logger.info(applications.size() + " applications loaded");

	return applications.size();

}

// Here we loop all of the folders under "workflows" looking for .workflow.xml files
public static int loadWorkflows(ServletContext servletContext) throws JAXBException, JSONException, InstantiationException, IllegalAccessException, ClassNotFoundException, IllegalArgumentException, SecurityException, InvocationTargetException, NoSuchMethodException, IOException, ParserConfigurationException, SAXException, TransformerFactoryConfigurationError, TransformerException, RapidLoadingException, XPathExpressionException {

	// get any existing workflows
	Workflows workflows = (Workflows) servletContext.getAttribute("workflows");

	// check we got some
	if (workflows != null) {
		// log
		_logger.info("Closing workflows");
		// loop the workflow ids
		for (String workflowId : workflows.getIds()) {
			// get the workflow
			Workflow workflow = workflows.get(workflowId);
			// close it
			workflow.close(servletContext);
		}
	}

	_logger.info("Loading workflows");

	// make a new set of workflows
	workflows = new Workflows();

	// get their folder
	File folderRoot = new File(servletContext.getRealPath("/") + "/WEB-INF/workflows/");

	// if there is one
	if (folderRoot.isDirectory()) {

		// create a filter for finding .workflow.xml files
		FilenameFilter xmlFilenameFilter = new FilenameFilter() {
			@Override
			public boolean accept(File dir, String name) {
				return
name.toLowerCase().endsWith(".workflow.xml");
			}
		};

		// get the list of files in this folder - should be all workflows
		File[] files = folderRoot.listFiles(xmlFilenameFilter);

		// if we got some
		if (files != null) {
			// get a marshaller
			// NOTE(review): this marshaller is never used in the loop below - candidate for removal
			Marshaller marshaller = RapidHttpServlet.getMarshaller();
			// loop the files
			for (File file : files) {
				// load this workflow
				Workflow workflow = Workflow.load(servletContext, file);
				// add to collection
				workflows.put(workflow.getId(), workflow);
			}
		} // workflows files check

	} // workflows folder check

	// store them in the context
	servletContext.setAttribute("workflows", workflows);

	_logger.info(workflows.size() + " workflows loaded");

	return workflows.size();

}

// loop all of the .process.xml files, start each process thread, and cache them in the servletContext
public static int loadProcesses(ServletContext servletContext) throws Exception {

	// get any existing processes
	// NOTE(review): unchecked cast from the context attribute - assumes only this class writes "processes"
	List<Process> processes = (List<Process>) servletContext.getAttribute("processes");

	// check we got some
	if (processes != null) {
		// log
		_logger.info("Stopping processes");
		// loop the processes
		for (Process process : processes) {
			// interrupt the process (which will stop it)
			process.interrupt();
		}
	}

	// log
	_logger.info("Loading processes");

	// make a new list of processes
	processes = new ArrayList<Process>();

	// get the directory in which the process xml files are stored
	File dir = new File(servletContext.getRealPath("/") + "/WEB-INF/processes/");

	// create a filter for finding .process.xml files
	FilenameFilter xmlFilenameFilter = new FilenameFilter() {
		@Override
		public boolean accept(File dir, String name) {
			return name.toLowerCase().endsWith(".process.xml");
		}
	};

	// create a schema object for the xsd
	Schema schema = _schemaFactory.newSchema(new File(servletContext.getRealPath("/") + "/WEB-INF/schemas/" + "/process.xsd"));
	// create a validator
	Validator validator = schema.newValidator();

	// loop the xml files in the folder
	for (File xmlFile : dir.listFiles(xmlFilenameFilter)) {

		// get a scanner to read the file
		Scanner fileScanner = new Scanner(xmlFile).useDelimiter("\\A");
		// read the xml into a string
		String xml = fileScanner.next();
		// close the scanner (and file)
		fileScanner.close();

		// validate the process xml file against the schema
		validator.validate(new StreamSource(new ByteArrayInputStream(xml.getBytes("UTF-8"))));

		// convert the xml into JSON
		JSONObject jsonProcess = org.json.XML.toJSONObject(xml).getJSONObject("process");

		// get the name from the json
		String name = jsonProcess.getString("name");
		// get the class name from the json
		String className = jsonProcess.getString("class");
		// get the class
		Class classClass = Class.forName(className);
		// check the class extends com.rapid.core.Process
		if (!Classes.extendsClass(classClass, com.rapid.core.Process.class)) throw new Exception(name + " process class " + classClass.getCanonicalName() + " must extend com.rapid.core.Process");
		// get a constructor
		Constructor constructor = classClass.getConstructor(ServletContext.class, JSONObject.class);
		// create a process object from the xml
		Process process = (Process) constructor.newInstance(servletContext, jsonProcess);
		// start it
		process.start();
		// add it to our collection
		processes.add(process);

	}

	// store them in the context
	servletContext.setAttribute("processes", processes);

	// log that we've loaded them
	_logger.info(processes.size() + " process" + (processes.size() == 1 ?
"" : "es") + " loaded"); // return the size return processes.size(); } @Override public void contextInitialized(ServletContextEvent event) { // request windows line breaks to make the files easier to edit (in particular the marshalled .xml files) System.setProperty("line.separator", "\r\n"); // this fixes Illegal reflective access by com.sun.xml.bind.v2.runtime.reflect.opt.Injector, see https://github.com/javaee/jaxb-v2/issues/1197 System.setProperty("com.sun.xml.bind.v2.bytecode.ClassTailor.noOptimize","true"); // get a reference to the servlet context ServletContext servletContext = event.getServletContext(); // set up logging try { // get a logger _logger = LogManager.getLogger(RapidHttpServlet.class); // set the logger and store in servletConext servletContext.setAttribute("logger", _logger); // log! _logger.info("Logger created"); } catch (Exception e) { System.err.println("Error initilising logging : " + e.getMessage()); e.printStackTrace(); } try { // assume no encryptionProvider EncryptionProvider encryptionProvider = null; // look for the rapid.txt file with the saved password and salt File secretsFile = new File(servletContext.getRealPath("/") + "/WEB-INF/security/encryption.txt"); // if it exists if (secretsFile.exists()) { // get a file reader BufferedReader br = new BufferedReader(new FileReader(secretsFile)); // read the first line String className = br.readLine().trim(); // close the reader br.close(); // if the class name does not start with # if (!className.startsWith("#")) { // get the class Class classClass = Class.forName(className); // get the interfaces Class[] classInterfaces = classClass.getInterfaces(); // assume it doesn't have the interface we want boolean gotInterface = false; // check we got some if (classInterfaces != null) { for (Class classInterface : classInterfaces) { if (com.rapid.utils.Encryption.EncryptionProvider.class.equals(classInterface)) { gotInterface = true; break; } } } // check the class extends com.rapid.Action if 
(gotInterface) { // get the constructors Constructor[] classConstructors = classClass.getDeclaredConstructors(); // check we got some if (classConstructors != null) { // assume we don't get the parameterless one we need Constructor constructor = null; // loop them for (Constructor classConstructor : classConstructors) { // check parameters if (classConstructor.getParameterTypes().length == 0) { constructor = classConstructor; break; } } // check we got what we want if (constructor == null) { _logger.error("Encryption not initialised : Class in security.txt class must have a parameterless constructor"); } else { // construct the class encryptionProvider = (EncryptionProvider) constructor.newInstance(); // log _logger.info("Encryption initialised"); } } } else { _logger.error("Encryption not initialised : Class in security.txt class must extend com.rapid.utils.Encryption.EncryptionProvider"); } } } else { _logger.info("Encyption not initialised"); } _monitor.setUpMonitor(servletContext); // create the encypted xml adapter (if the file above is not found there no encryption will occur) RapidHttpServlet.setEncryptedXmlAdapter(new EncryptedXmlAdapter(encryptionProvider)); // store away the encryption provider RapidHttpServlet.setEncryptionProvider(encryptionProvider); // initialise the schema factory (we'll reuse it in the various loaders) _schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); // initialise the list of classes we're going to want in the JAXB context (the loaders will start adding to it) _jaxbClasses = new ArrayList<Class>(); // load the logins first _logger.info("Loading logins"); // load the database drivers first loadLogins(servletContext); _logger.info("Loading database drivers"); // load the database drivers loadDatabaseDrivers(servletContext); _logger.info("Loading connection adapters"); // load the connection adapters loadConnectionAdapters(servletContext); _logger.info("Loading security adapters"); // load the security 
adapters loadSecurityAdapters(servletContext); _logger.info("Loading form adapters"); // load the form adapters loadFormAdapters(servletContext); _logger.info("Loading actions"); // load the actions loadActions(servletContext); _logger.info("Loading themes"); // load themes loadThemes(servletContext); _logger.info("Loading controls"); // load the controls loadControls(servletContext); // add some classes manually _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.class); _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.NameRestriction.class); _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.MinOccursRestriction.class); _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.MaxOccursRestriction.class); _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.MaxLengthRestriction.class); _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.MinLengthRestriction.class); _jaxbClasses.add(com.rapid.soa.SOAElementRestriction.EnumerationRestriction.class); _jaxbClasses.add(com.rapid.soa.Webservice.class); _jaxbClasses.add(com.rapid.soa.SQLWebservice.class); _jaxbClasses.add(com.rapid.soa.JavaWebservice.class); _jaxbClasses.add(com.rapid.core.Validation.class); _jaxbClasses.add(com.rapid.core.Action.class); _jaxbClasses.add(com.rapid.core.Event.class); _jaxbClasses.add(com.rapid.core.Style.class); _jaxbClasses.add(com.rapid.core.Control.class); _jaxbClasses.add(com.rapid.core.Page.class); _jaxbClasses.add(com.rapid.core.Application.class); _jaxbClasses.add(com.rapid.core.Applications.class); _jaxbClasses.add(com.rapid.core.Workflow.class); _jaxbClasses.add(com.rapid.core.Workflows.class); _jaxbClasses.add(com.rapid.core.Device.class); _jaxbClasses.add(com.rapid.core.Device.Devices.class); _jaxbClasses.add(com.rapid.core.Email.class); // convert arraylist to array Class[] classes = _jaxbClasses.toArray(new Class[_jaxbClasses.size()]); // re-init the JAXB context to include our injectable classes JAXBContext jaxbContext = JAXBContext.newInstance(classes); // this logs the 
JAXB classes _logger.trace("JAXB content : " + jaxbContext.toString()); // store the jaxb context in RapidHttpServlet RapidHttpServlet.setJAXBContext(jaxbContext); try { // get the extras.min.js file - this is a common cause of bugs on upgrades and will be rebuilt by the first reloaded app File extrasMin = new File(servletContext.getRealPath("/") + "/scripts_min/extras.min.js"); // delete the extras.min.js file if present if (extrasMin.exists()) extrasMin.delete(); } catch (Exception ex) { // just log _logger.info("Failed to delete extras.min.js", ex); } // load the devices Devices.load(servletContext); // load the email settings Email.load(servletContext); // load the applications! loadApplications(servletContext); // load the workflows! loadWorkflows(servletContext); // load the processes loadProcesses(servletContext); // add some useful global objects String localDateFormat = servletContext.getInitParameter("localDateFormat"); if (localDateFormat == null) localDateFormat = "dd/MM/yyyy"; servletContext.setAttribute("localDateFormat", localDateFormat); String localDateTimeFormat = servletContext.getInitParameter("localDateTimeFormat"); if (localDateTimeFormat == null) localDateTimeFormat = "dd/MM/yyyy HH:mm a"; servletContext.setAttribute("localDateTimeFormat", localDateTimeFormat); boolean actionCache = Boolean.parseBoolean(servletContext.getInitParameter("actionCache")); if (actionCache) servletContext.setAttribute("actionCache", new ActionCache(servletContext)); // allow calling to https without checking certs (for now) SSLContext sc = SSLContext.getInstance("SSL"); TrustManager[] trustAllCerts = new TrustManager[]{ new Https.TrustAllCerts() }; sc.init(null, trustAllCerts, new java.security.SecureRandom()); HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory()); } catch (Exception ex) { _logger.error("Error initialising Rapid : " + ex.getMessage()); ex.printStackTrace(); } } @Override public void contextDestroyed(ServletContextEvent event){ // log 
_logger.info("Shutting down..."); // get the servletContext ServletContext servletContext = event.getServletContext(); // get all processes List<Process> processes = (List<Process>) servletContext.getAttribute("processes"); // if we got some if (processes != null) { // loop them for (Process process : processes) { // log _logger.info("Stopping process ." + process.getName() + "..."); // interrupt the process (which stops it) process.interrupt(); } } // get all of the applications Applications applications = (Applications) servletContext.getAttribute("applications"); // if we got some if (applications != null) { // loop the application ids for (String id : applications.getIds()) { // get the application Versions versions = applications.getVersions(id); // loop the versions of each app for (String version : versions.keySet()) { // log _logger.info("Closing application " + id + "/" + version + "..."); // get the application Application application = applications.get(id, version); // have it close any sensitive resources application.close(servletContext); } } } // sleep for 2 seconds to allow any database connection cleanup to complete try { Thread.sleep(2000); } catch (Exception ex) {} // This manually deregisters JDBC drivers, which prevents Tomcat from complaining about memory leaks from this class Enumeration<Driver> drivers = DriverManager.getDrivers(); while (drivers.hasMoreElements()) { Driver driver = drivers.nextElement(); try { DriverManager.deregisterDriver(driver); _logger.info(String.format("Deregistering jdbc driver: %s", driver)); } catch (SQLException e) { _logger.error(String.format("Error deregistering driver %s", driver), e); } } // Thanks to http://stackoverflow.com/questions/11872316/tomcat-guice-jdbc-memory-leak Set<Thread> threadSet = Thread.getAllStackTraces().keySet(); Thread[] threadArray = threadSet.toArray(new Thread[threadSet.size()]); for (Thread t:threadArray) { if (t.getName().contains("Abandoned connection cleanup thread")) { 
synchronized (t) { try { _logger.info("Forcing stop of Abandoned connection cleanup thread"); t.stop(); //don't complain, it works } catch (Exception ex) { _logger.info("Error forcing stop of Abandoned connection cleanup thread",ex); } } } } // sleep for 1 second to allow any database connection cleanup to complete try { Thread.sleep(1000); } catch (Exception ex) {} // last log _logger.info("Logger shutdown"); // shutdown logger if (_logger != null) LogManager.shutdown(); } }
Full stack trace logging of error messages on startup
src/com/rapid/server/RapidServletContextListener.java
Full stack trace logging of error messages on startup
Java
agpl-3.0
7d69b6bcf860fe82f8ffa6cc13af4e198ab65fd3
0
play4science/gwap,play4science/gwap,play4science/gwap,play4science/gwap
/*
 * This file is part of gwap, an open platform for games with a purpose
 *
 * Copyright (C) 2013
 * Project play4science
 * Lehr- und Forschungseinheit für Programmier- und Modellierungssprachen
 * Ludwig-Maximilians-Universität München
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

package gwap.mit;

import gwap.model.Person;
import gwap.model.action.Bet;
import gwap.model.resource.Location;
import gwap.model.resource.Statement;
import gwap.tools.StatementHelper;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import javax.persistence.EntityManager;
import javax.persistence.Query;

import org.jboss.seam.ScopeType;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Logger;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.annotations.Out;
import org.jboss.seam.annotations.Scope;
import org.jboss.seam.faces.FacesMessages;
import org.jboss.seam.log.Log;

/**
 * Page-scoped Seam component that lets a player create a new
 * {@link Statement} and bet it on a {@link Location}, maintaining a
 * breadcrumb trail of visited locations for the location picker.
 *
 * @author Fabian Kneißl
 */
@Name("mitNewStatement")
@Scope(ScopeType.PAGE)
public class NewStatement implements Serializable {

	private static final long serialVersionUID = 1L;

	@Logger Log log;
	@In protected EntityManager entityManager;
	@In(create=true) protected Person person;
	@In protected FacesMessages facesMessages;
	@In protected Coins mitCoins;
	@In(required=false)@Out(required=false) protected Long locationId;
	// Breadcrumb trail of locations the user drilled into while picking one.
	@Out protected List<Location> breadcrumbLocations = new ArrayList<Location>();

	protected String text;
	protected String standardText;
	protected Integer points = 100; // initialize with being a perfect match
	protected Statement statement;

	/**
	 * Creates (or reuses) the statement for the entered text and then tries
	 * to assign the currently selected location to it.
	 *
	 * Validation errors (text too short, no location selected) are reported
	 * through {@link FacesMessages} and cause a {@code null} outcome so the
	 * user stays on the page.
	 *
	 * @return the success view id, or {@code null} if validation failed
	 */
	public String createStatement() {
		if (statement == null) {
			if (text != null && text.length() > 2) {
				if (standardText != null && standardText.length() > 2) {
					text = text.trim();
					standardText = standardText.trim();
					// Reuse an existing statement with identical text instead of
					// creating a duplicate row.
					Query query = entityManager.createNamedQuery("statement.byText").setParameter("text", text);
					if (query.getResultList().size() > 0) {
						statement = (Statement) query.getResultList().get(0);
						log.info("Statement already exists: '#0' (id: #1)", text, statement.getId());
					} else {
						log.info("Creating statement '#0'", text);
						statement = new Statement();
						statement.setText(text);
						statement.setCreator(person);
						statement.setEnabled(true);
						statement.setCreateDate(new Date());
						entityManager.persist(statement);
						// Tokenize both the raw and the standardized text for matching.
						StatementHelper.createStatementTokens(statement, text, entityManager);
						StatementHelper.createStatementStandardTokens(statement, standardText, entityManager);
						entityManager.flush();
						log.info("#0 created", statement);
					}
				} else
					facesMessages.addToControlFromResourceBundle("standardText", "game.newstatement.standardTextTooShort");
			} else
				facesMessages.addToControlFromResourceBundle("text", "game.newstatement.tooShort");
		}
		// Now, assign a location
		if (statement != null) {
			if (assignLocation()) {
				return "/newstatementcreated.xhtml";
			} else
				facesMessages.addToControlFromResourceBundle("text", "game.newstatement.selectLocation");
		}
		return null;
	}

	/**
	 * Persists a {@link Bet} linking the current statement to the location
	 * identified by {@link #locationId}.
	 *
	 * @return {@code true} if a valid location was found and the bet was
	 *         persisted, {@code false} otherwise
	 */
	public boolean assignLocation() {
		log.info("Trying to assign locationId #0 to statement #1", locationId, statement);
		if (locationId == null || locationId <= 0)
			return false;
		Location location = entityManager.find(Location.class, locationId);
		if (location == null)
			return false;
		Bet bet = new Bet();
		bet.setCreated(new Date());
		bet.setLocation(location);
		bet.setResource(statement);
		bet.setPerson(person);
		bet.setPoints(points);
		entityManager.persist(bet);
		log.info("Assigned location #0 to statement #1", location, statement);
		return true;
	}

	/**
	 * Appends the location with the given id to the breadcrumb trail
	 * (silently ignored if the id does not resolve to a location).
	 *
	 * @param locationId id of the location to append
	 * @return the (possibly updated) breadcrumb trail
	 */
	public List<Location> addToBreadcrumbLocation(Long locationId) {
		Location l = entityManager.find(Location.class, locationId);
		if (l != null)
			breadcrumbLocations.add(l);
		return breadcrumbLocations;
	}

	/**
	 * Truncates the breadcrumb trail so that the location with the given id
	 * becomes the last entry.
	 *
	 * @param locationId id of the breadcrumb entry to navigate back to
	 * @return the truncated breadcrumb trail
	 */
	public List<Location> navigateToBreadcrumbLocation(Long locationId) {
		for (int i = 0; i < breadcrumbLocations.size(); i++) {
			if (breadcrumbLocations.get(i).getId().equals(locationId)) {
				// Bug fix: the previous index-based removal loop skipped every
				// other element because remove(j) shifts the remaining entries
				// left while j keeps advancing. subList(...).clear() removes
				// the whole tail in one go.
				breadcrumbLocations.subList(i + 1, breadcrumbLocations.size()).clear();
				break;
			}
		}
		return breadcrumbLocations;
	}

	public String getText() {
		return text;
	}

	public void setText(String text) {
		this.text = text;
	}

	public String getStandardText() {
		return standardText;
	}

	public void setStandardText(String standardText) {
		this.standardText = standardText;
	}

	public Integer getPoints() {
		return points;
	}

	public void setPoints(Integer points) {
		this.points = points;
	}
}
src/hot/gwap/mit/NewStatement.java
/*
 * This file is part of gwap, an open platform for games with a purpose
 *
 * Copyright (C) 2013
 * Project play4science
 * Lehr- und Forschungseinheit für Programmier- und Modellierungssprachen
 * Ludwig-Maximilians-Universität München
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

package gwap.mit;

import gwap.model.Person;
import gwap.model.action.Bet;
import gwap.model.resource.Location;
import gwap.model.resource.Statement;
import gwap.tools.StatementHelper;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import javax.persistence.EntityManager;
import javax.persistence.Query;

import org.jboss.seam.ScopeType;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Logger;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.annotations.Out;
import org.jboss.seam.annotations.Scope;
import org.jboss.seam.faces.FacesMessages;
import org.jboss.seam.log.Log;

/**
 * Page-scoped Seam component that lets a player create a new
 * {@link Statement} and bet it on a {@link Location}, maintaining a
 * breadcrumb trail of visited locations for the location picker.
 *
 * @author Fabian Kneißl
 */
@Name("mitNewStatement")
@Scope(ScopeType.PAGE)
public class NewStatement implements Serializable {

	private static final long serialVersionUID = 1L;

	@Logger Log log;
	@In protected EntityManager entityManager;
	@In(create=true) protected Person person;
	@In protected FacesMessages facesMessages;
	@In protected Coins mitCoins;
	@In(required=false)@Out(required=false) protected Long locationId;
	// Breadcrumb trail of locations the user drilled into while picking one.
	@Out protected List<Location> breadcrumbLocations = new ArrayList<Location>();

	protected String text;
	protected String standardText;
	// Bug fix: previously uninitialized, so a bet created without the caller
	// setting points received setPoints(null). Default to a perfect match
	// (100), consistent with the current version of this class.
	protected Integer points = 100;
	protected Statement statement;

	/**
	 * Creates (or reuses) the statement for the entered text and then tries
	 * to assign the currently selected location to it.
	 *
	 * Validation errors (text too short, no location selected) are reported
	 * through {@link FacesMessages} and cause a {@code null} outcome so the
	 * user stays on the page.
	 *
	 * @return the success view id, or {@code null} if validation failed
	 */
	public String createStatement() {
		if (statement == null) {
			if (text != null && text.length() > 2) {
				if (standardText != null && standardText.length() > 2) {
					text = text.trim();
					standardText = standardText.trim();
					// Reuse an existing statement with identical text instead of
					// creating a duplicate row.
					Query query = entityManager.createNamedQuery("statement.byText").setParameter("text", text);
					if (query.getResultList().size() > 0) {
						statement = (Statement) query.getResultList().get(0);
						log.info("Statement already exists: '#0' (id: #1)", text, statement.getId());
					} else {
						log.info("Creating statement '#0'", text);
						statement = new Statement();
						statement.setText(text);
						statement.setCreator(person);
						statement.setEnabled(true);
						statement.setCreateDate(new Date());
						entityManager.persist(statement);
						// Tokenize both the raw and the standardized text for matching.
						StatementHelper.createStatementTokens(statement, text, entityManager);
						StatementHelper.createStatementStandardTokens(statement, standardText, entityManager);
						entityManager.flush();
						log.info("#0 created", statement);
					}
				} else
					facesMessages.addToControlFromResourceBundle("standardText", "game.newstatement.standardTextTooShort");
			} else
				facesMessages.addToControlFromResourceBundle("text", "game.newstatement.tooShort");
		}
		// Now, assign a location
		if (statement != null) {
			if (assignLocation()) {
				return "/newstatementcreated.xhtml";
			} else
				facesMessages.addToControlFromResourceBundle("text", "game.newstatement.selectLocation");
		}
		return null;
	}

	/**
	 * Persists a {@link Bet} linking the current statement to the location
	 * identified by {@link #locationId}.
	 *
	 * @return {@code true} if a valid location was found and the bet was
	 *         persisted, {@code false} otherwise
	 */
	public boolean assignLocation() {
		log.info("Trying to assign locationId #0 to statement #1", locationId, statement);
		if (locationId == null || locationId <= 0)
			return false;
		Location location = entityManager.find(Location.class, locationId);
		if (location == null)
			return false;
		Bet bet = new Bet();
		bet.setCreated(new Date());
		bet.setLocation(location);
		bet.setResource(statement);
		bet.setPerson(person);
		bet.setPoints(points);
		entityManager.persist(bet);
		log.info("Assigned location #0 to statement #1", location, statement);
		return true;
	}

	/**
	 * Appends the location with the given id to the breadcrumb trail
	 * (silently ignored if the id does not resolve to a location).
	 *
	 * @param locationId id of the location to append
	 * @return the (possibly updated) breadcrumb trail
	 */
	public List<Location> addToBreadcrumbLocation(Long locationId) {
		Location l = entityManager.find(Location.class, locationId);
		if (l != null)
			breadcrumbLocations.add(l);
		return breadcrumbLocations;
	}

	/**
	 * Truncates the breadcrumb trail so that the location with the given id
	 * becomes the last entry.
	 *
	 * @param locationId id of the breadcrumb entry to navigate back to
	 * @return the truncated breadcrumb trail
	 */
	public List<Location> navigateToBreadcrumbLocation(Long locationId) {
		for (int i = 0; i < breadcrumbLocations.size(); i++) {
			if (breadcrumbLocations.get(i).getId().equals(locationId)) {
				// Bug fix: the previous index-based removal loop skipped every
				// other element because remove(j) shifts the remaining entries
				// left while j keeps advancing. subList(...).clear() removes
				// the whole tail in one go.
				breadcrumbLocations.subList(i + 1, breadcrumbLocations.size()).clear();
				break;
			}
		}
		return breadcrumbLocations;
	}

	public String getText() {
		return text;
	}

	public void setText(String text) {
		this.text = text;
	}

	public String getStandardText() {
		return standardText;
	}

	public void setStandardText(String standardText) {
		this.standardText = standardText;
	}

	public Integer getPoints() {
		return points;
	}

	public void setPoints(Integer points) {
		this.points = points;
	}
}
metropolitalia: set default match to 100%
src/hot/gwap/mit/NewStatement.java
metropolitalia: set default match to 100%
Java
lgpl-2.1
cd69040bf8915696de8377a1a85cbdf31e5ac5f6
0
oskopek/jfreechart-fse,oskopek/jfreechart-fse,jfree/jfreechart-fse,jfree/jfreechart-fse
/* =========================================================== * JFreeChart : a free chart library for the Java(tm) platform * =========================================================== * * (C) Copyright 2000-2013, by Object Refinery Limited and Contributors. * * Project Info: http://www.jfree.org/jfreechart/index.html * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, * USA. * * [Oracle and Java are registered trademarks of Oracle and/or its affiliates. * Other names may be trademarks of their respective owners.] * * ----------------------------- * DefaultPolarItemRenderer.java * ----------------------------- * (C) Copyright 2004-2013, by Solution Engineering, Inc. and * Contributors. 
* * Original Author: Daniel Bridenbecker, Solution Engineering, Inc.; * Contributor(s): David Gilbert (for Object Refinery Limited); * Martin Hoeller (patch 2850344); * * Changes * ------- * 19-Jan-2004 : Version 1, contributed by DB with minor changes by DG (DG); * 15-Jul-2004 : Switched getX() with getXValue() and getY() with * getYValue() (DG); * 04-Oct-2004 : Renamed BooleanUtils --> BooleanUtilities (DG); * 20-Apr-2005 : Update for change to LegendItem class (DG); * ------------- JFREECHART 1.0.x --------------------------------------------- * 04-Aug-2006 : Implemented equals() and clone() (DG); * 02-Feb-2007 : Removed author tags from all over JFreeChart sources (DG); * 14-Mar-2007 : Fixed clone() method (DG); * 04-May-2007 : Fixed lookup for series paint and stroke (DG); * 18-May-2007 : Set dataset for LegendItem (DG); * 03-Sep-2009 : Applied patch 2850344 by Martin Hoeller (DG); * 27-Nov-2009 : Updated for modification to PolarItemRenderer interface (DG); * 03-Oct-2011 : Fixed potential NPE in equals() (MH); * 03-Oct-2011 : Added flag to connectFirstAndLastPoint (MH); * 03-Oct-2011 : Added tooltip and URL generator support (MH); * 03-Oct-2011 : Added some configuration options for the legend (MH); * 03-Oct-2011 : Added support for PolarPlot's angleOffset and direction (MH); * 16-Oct-2011 : Fixed serialization problems with fillComposite (MH); * 15-Jun-2012 : Remove JCommon dependencies (DG); * */ package org.jfree.chart.renderer; import java.awt.AlphaComposite; import java.awt.Composite; import java.awt.Graphics2D; import java.awt.Paint; import java.awt.Point; import java.awt.Shape; import java.awt.Stroke; import java.awt.geom.Ellipse2D; import java.awt.geom.GeneralPath; import java.awt.geom.Line2D; import java.awt.geom.PathIterator; import java.awt.geom.Rectangle2D; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.List; import org.jfree.chart.LegendItem; import 
org.jfree.chart.axis.NumberTick; import org.jfree.chart.axis.ValueAxis; import org.jfree.chart.axis.ValueTick; import org.jfree.chart.util.BooleanList; import org.jfree.chart.util.ObjectList; import org.jfree.chart.util.ObjectUtilities; import org.jfree.chart.util.PublicCloneable; import org.jfree.chart.util.ShapeUtilities; import org.jfree.chart.entity.EntityCollection; import org.jfree.chart.entity.XYItemEntity; import org.jfree.chart.event.RendererChangeEvent; import org.jfree.chart.labels.XYSeriesLabelGenerator; import org.jfree.chart.labels.XYToolTipGenerator; import org.jfree.chart.plot.DrawingSupplier; import org.jfree.chart.plot.PlotOrientation; import org.jfree.chart.plot.PlotRenderingInfo; import org.jfree.chart.plot.PolarPlot; import org.jfree.chart.renderer.xy.AbstractXYItemRenderer; import org.jfree.chart.text.TextUtilities; import org.jfree.chart.urls.XYURLGenerator; import org.jfree.chart.util.ParamChecks; import org.jfree.chart.util.SerialUtilities; import org.jfree.data.xy.XYDataset; /** * A renderer that can be used with the {@link PolarPlot} class. */ public class DefaultPolarItemRenderer extends AbstractRenderer implements PolarItemRenderer { private static final long serialVersionUID = 1L; /** The plot that the renderer is assigned to. */ private PolarPlot plot; /** Flags that control whether the renderer fills each series or not. */ private BooleanList seriesFilled; /** * Flag that controls whether an outline is drawn for filled series or * not. * * @since 1.0.14 */ private boolean drawOutlineWhenFilled; /** * The composite to use when filling series. * * @since 1.0.14 */ private transient Composite fillComposite; /** * A flag that controls whether the fill paint is used for filling * shapes. * * @since 1.0.14 */ private boolean useFillPaint; /** * The shape that is used to represent a line in the legend. * * @since 1.0.14 */ private transient Shape legendLine; /** * Flag that controls whether item shapes are visible or not. 
* * @since 1.0.14 */ private boolean shapesVisible; /** * Flag that controls if the first and last point of the dataset should be * connected or not. * * @since 1.0.14 */ private boolean connectFirstAndLastPoint; /** * A list of tool tip generators (one per series). * * @since 1.0.14 */ private ObjectList<XYToolTipGenerator> toolTipGeneratorList; /** * The base tool tip generator. * * @since 1.0.14 */ private XYToolTipGenerator baseToolTipGenerator; /** * The URL text generator. * * @since 1.0.14 */ private XYURLGenerator urlGenerator; /** * The legend item tool tip generator. * * @since 1.0.14 */ private XYSeriesLabelGenerator legendItemToolTipGenerator; /** * The legend item URL generator. * * @since 1.0.14 */ private XYSeriesLabelGenerator legendItemURLGenerator; /** * Creates a new instance of DefaultPolarItemRenderer */ public DefaultPolarItemRenderer() { this.seriesFilled = new BooleanList(); this.drawOutlineWhenFilled = true; this.fillComposite = AlphaComposite.getInstance( AlphaComposite.SRC_OVER, 0.3f); this.useFillPaint = false; // use item paint for fills by default this.legendLine = new Line2D.Double(-7.0, 0.0, 7.0, 0.0); this.shapesVisible = true; this.connectFirstAndLastPoint = true; this.toolTipGeneratorList = new ObjectList<XYToolTipGenerator>(); this.urlGenerator = null; this.legendItemToolTipGenerator = null; this.legendItemURLGenerator = null; } /** * Set the plot associated with this renderer. * * @param plot the plot. * * @see #getPlot() */ @Override public void setPlot(PolarPlot plot) { this.plot = plot; } /** * Return the plot associated with this renderer. * * @return The plot. * * @see #setPlot(PolarPlot) */ @Override public PolarPlot getPlot() { return this.plot; } /** * Returns <code>true</code> if the renderer will draw an outline around * a filled polygon, <code>false</code> otherwise. * * @return A boolean. 
* * @since 1.0.14 */ public boolean getDrawOutlineWhenFilled() { return this.drawOutlineWhenFilled; } /** * Set the flag that controls whether the outline around a filled * polygon will be drawn or not and sends a {@link RendererChangeEvent} * to all registered listeners. * * @param drawOutlineWhenFilled the flag. * * @since 1.0.14 */ public void setDrawOutlineWhenFilled(boolean drawOutlineWhenFilled) { this.drawOutlineWhenFilled = drawOutlineWhenFilled; fireChangeEvent(); } /** * Get the composite that is used for filling. * * @return The composite (never <code>null</code>). * * @since 1.0.14 */ public Composite getFillComposite() { return this.fillComposite; } /** * Sets the composite which will be used for filling polygons and sends a * {@link RendererChangeEvent} to all registered listeners. * * @param composite the composite to use (<code>null</code> not * permitted). * * @since 1.0.14 */ public void setFillComposite(Composite composite) { ParamChecks.nullNotPermitted(composite, "composite"); this.fillComposite = composite; fireChangeEvent(); } /** * Returns <code>true</code> if a shape will be drawn for every item, or * <code>false</code> if not. * * @return A boolean. * * @since 1.0.14 */ public boolean getShapesVisible() { return this.shapesVisible; } /** * Set the flag that controls whether a shape will be drawn for every * item, or not and sends a {@link RendererChangeEvent} to all registered * listeners. * * @param visible the flag. * * @since 1.0.14 */ public void setShapesVisible(boolean visible) { this.shapesVisible = visible; fireChangeEvent(); } /** * Returns <code>true</code> if first and last point of a series will be * connected, <code>false</code> otherwise. * * @return The current status of the flag. 
* * @since 1.0.14 */ public boolean getConnectFirstAndLastPoint() { return this.connectFirstAndLastPoint; } /** * Set the flag that controls whether the first and last point of a series * will be connected or not and sends a {@link RendererChangeEvent} to all * registered listeners. * * @param connect the flag. * * @since 1.0.14 */ public void setConnectFirstAndLastPoint(boolean connect) { this.connectFirstAndLastPoint = connect; fireChangeEvent(); } /** * Returns the drawing supplier from the plot. * * @return The drawing supplier. */ @Override public DrawingSupplier getDrawingSupplier() { DrawingSupplier result = null; PolarPlot p = getPlot(); if (p != null) { result = p.getDrawingSupplier(); } return result; } /** * Returns <code>true</code> if the renderer should fill the specified * series, and <code>false</code> otherwise. * * @param series the series index (zero-based). * * @return A boolean. */ public boolean isSeriesFilled(int series) { boolean result = false; Boolean b = this.seriesFilled.getBoolean(series); if (b != null) { result = b; } return result; } /** * Sets a flag that controls whether or not a series is filled. * * @param series the series index. * @param filled the flag. */ public void setSeriesFilled(int series, boolean filled) { this.seriesFilled.setBoolean(series, filled); } /** * Returns <code>true</code> if the renderer should use the fill paint * setting to fill shapes, and <code>false</code> if it should just * use the regular paint. * * @return A boolean. * * @see #setUseFillPaint(boolean) * @since 1.0.14 */ public boolean getUseFillPaint() { return this.useFillPaint; } /** * Sets the flag that controls whether the fill paint is used to fill * shapes, and sends a {@link RendererChangeEvent} to all * registered listeners. * * @param flag the flag. 
* * @see #getUseFillPaint() * @since 1.0.14 */ public void setUseFillPaint(boolean flag) { this.useFillPaint = flag; fireChangeEvent(); } /** * Returns the shape used to represent a line in the legend. * * @return The legend line (never <code>null</code>). * * @see #setLegendLine(Shape) */ public Shape getLegendLine() { return this.legendLine; } /** * Sets the shape used as a line in each legend item and sends a * {@link RendererChangeEvent} to all registered listeners. * * @param line the line (<code>null</code> not permitted). * * @see #getLegendLine() */ public void setLegendLine(Shape line) { ParamChecks.nullNotPermitted(line, "line"); this.legendLine = line; fireChangeEvent(); } /** * Adds an entity to the collection. * * @param entities the entity collection being populated. * @param area the entity area (if <code>null</code> a default will be * used). * @param dataset the dataset. * @param series the series. * @param item the item. * @param entityX the entity's center x-coordinate in user space (only * used if <code>area</code> is <code>null</code>). * @param entityY the entity's center y-coordinate in user space (only * used if <code>area</code> is <code>null</code>). 
*/ protected void addEntity(EntityCollection entities, Shape area, XYDataset dataset, int series, int item, double entityX, double entityY) { if (!getItemCreateEntity(series, item)) { return; } Shape hotspot = area; if (hotspot == null) { double r = getDefaultEntityRadius(); double w = r * 2; if (getPlot().getOrientation() == PlotOrientation.VERTICAL) { hotspot = new Ellipse2D.Double(entityX - r, entityY - r, w, w); } else { hotspot = new Ellipse2D.Double(entityY - r, entityX - r, w, w); } } String tip = null; XYToolTipGenerator generator = getToolTipGenerator(series, item); if (generator != null) { tip = generator.generateToolTip(dataset, series, item); } String url = null; if (getURLGenerator() != null) { url = getURLGenerator().generateURL(dataset, series, item); } XYItemEntity entity = new XYItemEntity(hotspot, dataset, series, item, tip, url); entities.add(entity); } /** * Plots the data for a given series. * * @param g2 the drawing surface. * @param dataArea the data area. * @param info collects plot rendering info. * @param plot the plot. * @param dataset the dataset. * @param seriesIndex the series index. 
*/ @Override public void drawSeries(Graphics2D g2, Rectangle2D dataArea, PlotRenderingInfo info, PolarPlot plot, XYDataset dataset, int seriesIndex) { final int numPoints = dataset.getItemCount(seriesIndex); if (numPoints == 0) { return; } GeneralPath poly = null; ValueAxis axis = plot.getAxisForDataset(plot.indexOf(dataset)); for (int i = 0; i < numPoints; i++) { double theta = dataset.getXValue(seriesIndex, i); double radius = dataset.getYValue(seriesIndex, i); Point p = plot.translateToJava2D(theta, radius, axis, dataArea); if (poly == null) { poly = new GeneralPath(); poly.moveTo(p.x, p.y); } else { poly.lineTo(p.x, p.y); } } if (getConnectFirstAndLastPoint()) { poly.closePath(); } g2.setPaint(lookupSeriesPaint(seriesIndex)); g2.setStroke(lookupSeriesStroke(seriesIndex)); if (isSeriesFilled(seriesIndex)) { Composite savedComposite = g2.getComposite(); g2.setComposite(this.fillComposite); g2.fill(poly); g2.setComposite(savedComposite); if (this.drawOutlineWhenFilled) { // draw the outline of the filled polygon g2.setPaint(lookupSeriesOutlinePaint(seriesIndex)); g2.draw(poly); } } else { // just the lines, no filling g2.draw(poly); } // draw the item shapes if (this.shapesVisible) { // setup for collecting optional entity info... 
EntityCollection entities = null; if (info != null) { entities = info.getOwner().getEntityCollection(); } PathIterator pi = poly.getPathIterator(null); int i = 0; while (!pi.isDone()) { final float[] coords = new float[6]; final int segType = pi.currentSegment(coords); pi.next(); if (segType != PathIterator.SEG_LINETO && segType != PathIterator.SEG_MOVETO) { continue; } final int x = Math.round(coords[0]); final int y = Math.round(coords[1]); final Shape shape = ShapeUtilities.createTranslatedShape( getItemShape(seriesIndex, i++), x, y); Paint paint; if (useFillPaint) { paint = lookupSeriesFillPaint(seriesIndex); } else { paint = lookupSeriesPaint(seriesIndex); } g2.setPaint(paint); g2.fill(shape); if (isSeriesFilled(seriesIndex) && this.drawOutlineWhenFilled) { g2.setPaint(lookupSeriesOutlinePaint(seriesIndex)); g2.setStroke(lookupSeriesOutlineStroke(seriesIndex)); g2.draw(shape); } // add an entity for the item, but only if it falls within the // data area... if (entities != null && AbstractXYItemRenderer.isPointInRect(dataArea, x, y)) { addEntity(entities, shape, dataset, seriesIndex, i-1, x, y); } } } } /** * Draw the angular gridlines - the spokes. * * @param g2 the drawing surface. * @param plot the plot (<code>null</code> not permitted). * @param ticks the ticks (<code>null</code> not permitted). * @param dataArea the data area. 
     */
    @Override
    public void drawAngularGridLines(Graphics2D g2, PolarPlot plot,
                List<ValueTick> ticks, Rectangle2D dataArea) {
        g2.setFont(plot.getAngleLabelFont());
        g2.setStroke(plot.getAngleGridlineStroke());
        g2.setPaint(plot.getAngleGridlinePaint());

        ValueAxis axis = plot.getAxis();
        // honour axis inversion: each spoke runs from the centre value out
        // to the outer value
        double centerValue, outerValue;
        if (axis.isInverted()) {
            outerValue = axis.getLowerBound();
            centerValue = axis.getUpperBound();
        }
        else {
            outerValue = axis.getUpperBound();
            centerValue = axis.getLowerBound();
        }
        Point center = plot.translateToJava2D(0, centerValue, axis, dataArea);
        for (ValueTick tick : ticks) {
            double tickVal = tick.getValue();
            Point p = plot.translateToJava2D(tickVal, outerValue,
                    plot.getAxis(), dataArea);
            g2.setPaint(plot.getAngleGridlinePaint());
            g2.drawLine(center.x, center.y, p.x, p.y);
            if (plot.isAngleLabelsVisible()) {
                int x = p.x;
                int y = p.y;
                g2.setPaint(plot.getAngleLabelPaint());
                TextUtilities.drawAlignedString(tick.getText(), g2, x, y,
                        tick.getTextAnchor());
            }
        }
    }

    /**
     * Draw the radial gridlines - the rings.
     *
     * @param g2  the drawing surface (<code>null</code> not permitted).
     * @param plot  the plot (<code>null</code> not permitted).
     * @param radialAxis  the radial axis (<code>null</code> not permitted).
     * @param ticks  the ticks (<code>null</code> not permitted).
     * @param dataArea  the data area.
     */
    @Override
    public void drawRadialGridLines(Graphics2D g2, PolarPlot plot,
            ValueAxis radialAxis, List<ValueTick> ticks,
            Rectangle2D dataArea) {
        ParamChecks.nullNotPermitted(radialAxis, "radialAxis");
        g2.setFont(radialAxis.getTickLabelFont());
        g2.setPaint(plot.getRadiusGridlinePaint());
        g2.setStroke(plot.getRadiusGridlineStroke());

        // honour axis inversion when locating the centre of the rings
        double centerValue;
        if (radialAxis.isInverted()) {
            centerValue = radialAxis.getUpperBound();
        }
        else {
            centerValue = radialAxis.getLowerBound();
        }
        Point center = plot.translateToJava2D(0, centerValue, radialAxis,
                dataArea);

        for (ValueTick tick : ticks) {
            // the tick value is translated at the plot's angle offset; the
            // sign depends on the plot direction
            double angleDegrees = plot.isCounterClockwise()
                    ? plot.getAngleOffset() : -plot.getAngleOffset();
            Point p = plot.translateToJava2D(angleDegrees,
                    ((NumberTick) tick).getNumber().doubleValue(),
                    radialAxis, dataArea);
            int r = p.x - center.x;
            int upperLeftX = center.x - r;
            int upperLeftY = center.y - r;
            int d = 2 * r;
            Ellipse2D ring = new Ellipse2D.Double(upperLeftX, upperLeftY,
                    d, d);
            g2.setPaint(plot.getRadiusGridlinePaint());
            g2.draw(ring);
        }
    }

    /**
     * Return the legend for the given series.
     *
     * @param series  the series index.
     *
     * @return The legend item.
     */
    @Override
    public LegendItem getLegendItem(int series) {
        LegendItem result;
        PolarPlot plot = getPlot();
        if (plot == null) {
            return null;
        }
        XYDataset dataset = plot.getDataset(plot.getIndexOf(this));
        if (dataset == null) {
            return null;
        }

        String toolTipText = null;
        if (getLegendItemToolTipGenerator() != null) {
            toolTipText = getLegendItemToolTipGenerator().generateLabel(
                    dataset, series);
        }
        String urlText = null;
        if (getLegendItemURLGenerator() != null) {
            urlText = getLegendItemURLGenerator().generateLabel(dataset,
                    series);
        }

        Comparable seriesKey = dataset.getSeriesKey(series);
        String label = seriesKey.toString();
        String description = label;
        Shape shape = lookupSeriesShape(series);
        Paint paint;
        if (this.useFillPaint) {
            paint = lookupSeriesFillPaint(series);
        }
        else {
            paint = lookupSeriesPaint(series);
        }
        Stroke stroke = lookupSeriesStroke(series);
        Paint outlinePaint = lookupSeriesOutlinePaint(series);
        Stroke outlineStroke = lookupSeriesOutlineStroke(series);
        // the legend shape only gets an outline when the series itself is
        // drawn filled-with-outline
        boolean shapeOutlined = isSeriesFilled(series)
                && this.drawOutlineWhenFilled;
        result = new LegendItem(label, description, toolTipText, urlText,
                getShapesVisible(), shape, /* shapeFilled=*/ true, paint,
                shapeOutlined, outlinePaint, outlineStroke,
                /* lineVisible= */ true, this.legendLine, stroke, paint);
        result.setToolTipText(toolTipText);
        result.setURLText(urlText);
        result.setDataset(dataset);
        result.setSeriesKey(seriesKey);
        result.setSeriesIndex(series);
        return result;
    }

    /**
     * Returns the tooltip generator for the
     * specified series and item.
     *
     * @param series  the series index.
     * @param item  the item index.
     *
     * @return The tooltip generator (possibly <code>null</code>).
     *
     * @since 1.0.14
     */
    @Override
    public XYToolTipGenerator getToolTipGenerator(int series, int item) {
        // per-series generator wins; otherwise fall back to the base one
        XYToolTipGenerator generator = this.toolTipGeneratorList.get(series);
        if (generator == null) {
            generator = this.baseToolTipGenerator;
        }
        return generator;
    }

    /**
     * Returns the tool tip generator for the specified series.
     *
     * @param series  the series index.
     *
     * @return The tooltip generator (possibly <code>null</code>).
     *
     * @since 1.0.14
     */
    @Override
    public XYToolTipGenerator getSeriesToolTipGenerator(int series) {
        return this.toolTipGeneratorList.get(series);
    }

    /**
     * Sets the tooltip generator for the specified series.
     *
     * @param series  the series index.
     * @param generator  the tool tip generator (<code>null</code> permitted).
     *
     * @since 1.0.14
     */
    @Override
    public void setSeriesToolTipGenerator(int series,
            XYToolTipGenerator generator) {
        this.toolTipGeneratorList.set(series, generator);
        fireChangeEvent();
    }

    /**
     * Returns the default tool tip generator.
     *
     * @return The default tool tip generator (possibly <code>null</code>).
     *
     * @since 1.0.14
     */
    @Override
    public XYToolTipGenerator getBaseToolTipGenerator() {
        return this.baseToolTipGenerator;
    }

    /**
     * Sets the default tool tip generator and sends a
     * {@link RendererChangeEvent} to all registered listeners.
     *
     * @param generator  the generator (<code>null</code> permitted).
     *
     * @since 1.0.14
     */
    @Override
    public void setBaseToolTipGenerator(XYToolTipGenerator generator) {
        this.baseToolTipGenerator = generator;
        fireChangeEvent();
    }

    /**
     * Returns the URL generator.
     *
     * @return The URL generator (possibly <code>null</code>).
     *
     * @since 1.0.14
     */
    @Override
    public XYURLGenerator getURLGenerator() {
        return this.urlGenerator;
    }

    /**
     * Sets the URL generator.
     *
     * @param urlGenerator  the generator (<code>null</code> permitted)
     *
     * @since 1.0.14
     */
    @Override
    public void setURLGenerator(XYURLGenerator urlGenerator) {
        this.urlGenerator = urlGenerator;
        fireChangeEvent();
    }

    /**
     * Returns the legend item tool tip generator.
     *
     * @return The tool tip generator (possibly <code>null</code>).
     *
     * @see #setLegendItemToolTipGenerator(XYSeriesLabelGenerator)
     * @since 1.0.14
     */
    public XYSeriesLabelGenerator getLegendItemToolTipGenerator() {
        return this.legendItemToolTipGenerator;
    }

    /**
     * Sets the legend item tool tip generator and sends a
     * {@link RendererChangeEvent} to all registered listeners.
     *
     * @param generator  the generator (<code>null</code> permitted).
     *
     * @see #getLegendItemToolTipGenerator()
     * @since 1.0.14
     */
    public void setLegendItemToolTipGenerator(
            XYSeriesLabelGenerator generator) {
        this.legendItemToolTipGenerator = generator;
        fireChangeEvent();
    }

    /**
     * Returns the legend item URL generator.
     *
     * @return The URL generator (possibly <code>null</code>).
     *
     * @see #setLegendItemURLGenerator(XYSeriesLabelGenerator)
     * @since 1.0.14
     */
    public XYSeriesLabelGenerator getLegendItemURLGenerator() {
        return this.legendItemURLGenerator;
    }

    /**
     * Sets the legend item URL generator and sends a
     * {@link RendererChangeEvent} to all registered listeners.
     *
     * @param generator  the generator (<code>null</code> permitted).
     *
     * @see #getLegendItemURLGenerator()
     * @since 1.0.14
     */
    public void setLegendItemURLGenerator(XYSeriesLabelGenerator generator) {
        this.legendItemURLGenerator = generator;
        fireChangeEvent();
    }

    /**
     * Tests this renderer for equality with an arbitrary object.
     *
     * @param obj  the object (<code>null</code> not permitted).
     *
     * @return <code>true</code> if this renderer is equal to <code>obj</code>,
     *     and <code>false</code> otherwise.
     */
    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (!(obj instanceof DefaultPolarItemRenderer)) {
            return false;
        }
        DefaultPolarItemRenderer that = (DefaultPolarItemRenderer) obj;
        if (!this.seriesFilled.equals(that.seriesFilled)) {
            return false;
        }
        if (this.drawOutlineWhenFilled != that.drawOutlineWhenFilled) {
            return false;
        }
        if (!ObjectUtilities.equal(this.fillComposite, that.fillComposite)) {
            return false;
        }
        if (this.useFillPaint != that.useFillPaint) {
            return false;
        }
        if (!ShapeUtilities.equal(this.legendLine, that.legendLine)) {
            return false;
        }
        if (this.shapesVisible != that.shapesVisible) {
            return false;
        }
        if (this.connectFirstAndLastPoint != that.connectFirstAndLastPoint) {
            return false;
        }
        if (!this.toolTipGeneratorList.equals(that.toolTipGeneratorList)) {
            return false;
        }
        if (!ObjectUtilities.equal(this.baseToolTipGenerator,
                that.baseToolTipGenerator)) {
            return false;
        }
        if (!ObjectUtilities.equal(this.urlGenerator, that.urlGenerator)) {
            return false;
        }
        if (!ObjectUtilities.equal(this.legendItemToolTipGenerator,
                that.legendItemToolTipGenerator)) {
            return false;
        }
        if (!ObjectUtilities.equal(this.legendItemURLGenerator,
                that.legendItemURLGenerator)) {
            return false;
        }
        // delegate the remaining state comparison to AbstractRenderer
        return super.equals(obj);
    }

    /**
     * Returns a clone of the renderer.
     *
     * @return A clone.
     *
     * @throws CloneNotSupportedException if the renderer cannot be cloned.
     */
    @Override
    public Object clone() throws CloneNotSupportedException {
        DefaultPolarItemRenderer clone
                = (DefaultPolarItemRenderer) super.clone();
        if (this.legendLine != null) {
            clone.legendLine = ShapeUtilities.clone(this.legendLine);
        }
        clone.seriesFilled = (BooleanList) this.seriesFilled.clone();
        // shallow copy of the list structure; the generators themselves are
        // shared unless they implement PublicCloneable (handled below for
        // the base generator only)
        clone.toolTipGeneratorList
                = (ObjectList<XYToolTipGenerator>) this.toolTipGeneratorList.clone();
        if (clone.baseToolTipGenerator instanceof PublicCloneable) {
            clone.baseToolTipGenerator
                    = ObjectUtilities.clone(this.baseToolTipGenerator);
        }
        if (clone.urlGenerator instanceof PublicCloneable) {
            clone.urlGenerator = ObjectUtilities.clone(this.urlGenerator);
        }
        if (clone.legendItemToolTipGenerator instanceof PublicCloneable) {
            clone.legendItemToolTipGenerator
                    = ObjectUtilities.clone(this.legendItemToolTipGenerator);
        }
        if (clone.legendItemURLGenerator instanceof PublicCloneable) {
            clone.legendItemURLGenerator
                    = ObjectUtilities.clone(this.legendItemURLGenerator);
        }
        return clone;
    }

    /**
     * Provides serialization support.
     *
     * @param stream  the input stream.
     *
     * @throws IOException  if there is an I/O error.
     * @throws ClassNotFoundException  if there is a classpath problem.
     */
    private void readObject(ObjectInputStream stream)
            throws IOException, ClassNotFoundException {
        stream.defaultReadObject();
        // transient fields restored explicitly
        this.legendLine = SerialUtilities.readShape(stream);
        this.fillComposite = SerialUtilities.readComposite(stream);
    }

    /**
     * Provides serialization support.
     *
     * @param stream  the output stream.
     *
     * @throws IOException  if there is an I/O error.
     */
    private void writeObject(ObjectOutputStream stream) throws IOException {
        stream.defaultWriteObject();
        // transient fields written explicitly
        SerialUtilities.writeShape(this.legendLine, stream);
        SerialUtilities.writeComposite(this.fillComposite, stream);
    }

}
src/main/java/org/jfree/chart/renderer/DefaultPolarItemRenderer.java
/* =========================================================== * JFreeChart : a free chart library for the Java(tm) platform * =========================================================== * * (C) Copyright 2000-2012, by Object Refinery Limited and Contributors. * * Project Info: http://www.jfree.org/jfreechart/index.html * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, * USA. * * [Oracle and Java are registered trademarks of Oracle and/or its affiliates. * Other names may be trademarks of their respective owners.] * * ----------------------------- * DefaultPolarItemRenderer.java * ----------------------------- * (C) Copyright 2004-2012, by Solution Engineering, Inc. and * Contributors. 
* * Original Author: Daniel Bridenbecker, Solution Engineering, Inc.; * Contributor(s): David Gilbert (for Object Refinery Limited); * Martin Hoeller (patch 2850344); * * Changes * ------- * 19-Jan-2004 : Version 1, contributed by DB with minor changes by DG (DG); * 15-Jul-2004 : Switched getX() with getXValue() and getY() with * getYValue() (DG); * 04-Oct-2004 : Renamed BooleanUtils --> BooleanUtilities (DG); * 20-Apr-2005 : Update for change to LegendItem class (DG); * ------------- JFREECHART 1.0.x --------------------------------------------- * 04-Aug-2006 : Implemented equals() and clone() (DG); * 02-Feb-2007 : Removed author tags from all over JFreeChart sources (DG); * 14-Mar-2007 : Fixed clone() method (DG); * 04-May-2007 : Fixed lookup for series paint and stroke (DG); * 18-May-2007 : Set dataset for LegendItem (DG); * 03-Sep-2009 : Applied patch 2850344 by Martin Hoeller (DG); * 27-Nov-2009 : Updated for modification to PolarItemRenderer interface (DG); * 03-Oct-2011 : Fixed potential NPE in equals() (MH); * 03-Oct-2011 : Added flag to connectFirstAndLastPoint (MH); * 03-Oct-2011 : Added tooltip and URL generator support (MH); * 03-Oct-2011 : Added some configuration options for the legend (MH); * 03-Oct-2011 : Added support for PolarPlot's angleOffset and direction (MH); * 16-Oct-2011 : Fixed serialization problems with fillComposite (MH); * 15-Jun-2012 : Remove JCommon dependencies (DG); * */ package org.jfree.chart.renderer; import java.awt.AlphaComposite; import java.awt.Composite; import java.awt.Graphics2D; import java.awt.Paint; import java.awt.Point; import java.awt.Shape; import java.awt.Stroke; import java.awt.geom.Ellipse2D; import java.awt.geom.GeneralPath; import java.awt.geom.Line2D; import java.awt.geom.PathIterator; import java.awt.geom.Rectangle2D; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.List; import org.jfree.chart.LegendItem; import 
org.jfree.chart.axis.NumberTick; import org.jfree.chart.axis.ValueAxis; import org.jfree.chart.axis.ValueTick; import org.jfree.chart.util.BooleanList; import org.jfree.chart.util.ObjectList; import org.jfree.chart.util.ObjectUtilities; import org.jfree.chart.util.PublicCloneable; import org.jfree.chart.util.ShapeUtilities; import org.jfree.chart.entity.EntityCollection; import org.jfree.chart.entity.XYItemEntity; import org.jfree.chart.event.RendererChangeEvent; import org.jfree.chart.labels.XYSeriesLabelGenerator; import org.jfree.chart.labels.XYToolTipGenerator; import org.jfree.chart.plot.DrawingSupplier; import org.jfree.chart.plot.PlotOrientation; import org.jfree.chart.plot.PlotRenderingInfo; import org.jfree.chart.plot.PolarPlot; import org.jfree.chart.renderer.xy.AbstractXYItemRenderer; import org.jfree.chart.text.TextUtilities; import org.jfree.chart.urls.XYURLGenerator; import org.jfree.chart.util.SerialUtilities; import org.jfree.data.xy.XYDataset; /** * A renderer that can be used with the {@link PolarPlot} class. */ public class DefaultPolarItemRenderer extends AbstractRenderer implements PolarItemRenderer { private static final long serialVersionUID = 1L; /** The plot that the renderer is assigned to. */ private PolarPlot plot; /** Flags that control whether the renderer fills each series or not. */ private BooleanList seriesFilled; /** * Flag that controls whether an outline is drawn for filled series or * not. * * @since 1.0.14 */ private boolean drawOutlineWhenFilled; /** * The composite to use when filling series. * * @since 1.0.14 */ private transient Composite fillComposite; /** * A flag that controls whether the fill paint is used for filling * shapes. * * @since 1.0.14 */ private boolean useFillPaint; /** * The shape that is used to represent a line in the legend. * * @since 1.0.14 */ private transient Shape legendLine; /** * Flag that controls whether item shapes are visible or not. 
     *
     * @since 1.0.14
     */
    private boolean shapesVisible;

    /**
     * Flag that controls if the first and last point of the dataset should be
     * connected or not.
     *
     * @since 1.0.14
     */
    private boolean connectFirstAndLastPoint;

    /**
     * A list of tool tip generators (one per series).
     *
     * @since 1.0.14
     */
    private ObjectList<XYToolTipGenerator> toolTipGeneratorList;

    /**
     * The base tool tip generator.
     *
     * @since 1.0.14
     */
    private XYToolTipGenerator baseToolTipGenerator;

    /**
     * The URL text generator.
     *
     * @since 1.0.14
     */
    private XYURLGenerator urlGenerator;

    /**
     * The legend item tool tip generator.
     *
     * @since 1.0.14
     */
    private XYSeriesLabelGenerator legendItemToolTipGenerator;

    /**
     * The legend item URL generator.
     *
     * @since 1.0.14
     */
    private XYSeriesLabelGenerator legendItemURLGenerator;

    /**
     * Creates a new instance of DefaultPolarItemRenderer
     */
    public DefaultPolarItemRenderer() {
        this.seriesFilled = new BooleanList();
        this.drawOutlineWhenFilled = true;
        this.fillComposite = AlphaComposite.getInstance(
                AlphaComposite.SRC_OVER, 0.3f);
        this.useFillPaint = false;  // use item paint for fills by default
        this.legendLine = new Line2D.Double(-7.0, 0.0, 7.0, 0.0);
        this.shapesVisible = true;
        this.connectFirstAndLastPoint = true;
        this.toolTipGeneratorList = new ObjectList<XYToolTipGenerator>();
        this.urlGenerator = null;
        this.legendItemToolTipGenerator = null;
        this.legendItemURLGenerator = null;
    }

    /**
     * Set the plot associated with this renderer.
     *
     * @param plot  the plot.
     *
     * @see #getPlot()
     */
    @Override
    public void setPlot(PolarPlot plot) {
        this.plot = plot;
    }

    /**
     * Return the plot associated with this renderer.
     *
     * @return The plot.
     *
     * @see #setPlot(PolarPlot)
     */
    @Override
    public PolarPlot getPlot() {
        return this.plot;
    }

    /**
     * Returns <code>true</code> if the renderer will draw an outline around
     * a filled polygon, <code>false</code> otherwise.
     *
     * @return A boolean.
     *
     * @since 1.0.14
     */
    public boolean getDrawOutlineWhenFilled() {
        return this.drawOutlineWhenFilled;
    }

    /**
     * Set the flag that controls whether the outline around a filled
     * polygon will be drawn or not and sends a {@link RendererChangeEvent}
     * to all registered listeners.
     *
     * @param drawOutlineWhenFilled  the flag.
     *
     * @since 1.0.14
     */
    public void setDrawOutlineWhenFilled(boolean drawOutlineWhenFilled) {
        this.drawOutlineWhenFilled = drawOutlineWhenFilled;
        fireChangeEvent();
    }

    /**
     * Get the composite that is used for filling.
     *
     * @return The composite (never <code>null</code>).
     *
     * @since 1.0.14
     */
    public Composite getFillComposite() {
        return this.fillComposite;
    }

    /**
     * Set the composite which will be used for filling polygons and sends a
     * {@link RendererChangeEvent} to all registered listeners.
     *
     * @param composite  the composite to use (<code>null</code> not
     *         permitted).
     *
     * @since 1.0.14
     */
    public void setFillComposite(Composite composite) {
        if (composite == null) {
            throw new IllegalArgumentException("Null 'composite' argument.");
        }
        this.fillComposite = composite;
        fireChangeEvent();
    }

    /**
     * Returns <code>true</code> if a shape will be drawn for every item, or
     * <code>false</code> if not.
     *
     * @return A boolean.
     *
     * @since 1.0.14
     */
    public boolean getShapesVisible() {
        return this.shapesVisible;
    }

    /**
     * Set the flag that controls whether a shape will be drawn for every
     * item, or not and sends a {@link RendererChangeEvent} to all registered
     * listeners.
     *
     * @param visible  the flag.
     *
     * @since 1.0.14
     */
    public void setShapesVisible(boolean visible) {
        this.shapesVisible = visible;
        fireChangeEvent();
    }

    /**
     * Returns <code>true</code> if first and last point of a series will be
     * connected, <code>false</code> otherwise.
     *
     * @return The current status of the flag.
     *
     * @since 1.0.14
     */
    public boolean getConnectFirstAndLastPoint() {
        return this.connectFirstAndLastPoint;
    }

    /**
     * Set the flag that controls whether the first and last point of a series
     * will be connected or not and sends a {@link RendererChangeEvent} to all
     * registered listeners.
     *
     * @param connect  the flag.
     *
     * @since 1.0.14
     */
    public void setConnectFirstAndLastPoint(boolean connect) {
        this.connectFirstAndLastPoint = connect;
        fireChangeEvent();
    }

    /**
     * Returns the drawing supplier from the plot.
     *
     * @return The drawing supplier.
     */
    @Override
    public DrawingSupplier getDrawingSupplier() {
        DrawingSupplier result = null;
        PolarPlot p = getPlot();
        if (p != null) {
            result = p.getDrawingSupplier();
        }
        return result;
    }

    /**
     * Returns <code>true</code> if the renderer should fill the specified
     * series, and <code>false</code> otherwise.
     *
     * @param series  the series index (zero-based).
     *
     * @return A boolean.
     */
    public boolean isSeriesFilled(int series) {
        boolean result = false;
        // an unset entry (null) defaults to 'not filled'
        Boolean b = this.seriesFilled.getBoolean(series);
        if (b != null) {
            result = b;
        }
        return result;
    }

    /**
     * Sets a flag that controls whether or not a series is filled.
     *
     * @param series  the series index.
     * @param filled  the flag.
     */
    public void setSeriesFilled(int series, boolean filled) {
        // NOTE(review): unlike the other setters in this class, this does
        // not fire a change event - callers must trigger a redraw
        // themselves; confirm whether this is intentional
        this.seriesFilled.setBoolean(series, filled);
    }

    /**
     * Returns <code>true</code> if the renderer should use the fill paint
     * setting to fill shapes, and <code>false</code> if it should just
     * use the regular paint.
     *
     * @return A boolean.
     *
     * @see #setUseFillPaint(boolean)
     * @since 1.0.14
     */
    public boolean getUseFillPaint() {
        return this.useFillPaint;
    }

    /**
     * Sets the flag that controls whether the fill paint is used to fill
     * shapes, and sends a {@link RendererChangeEvent} to all
     * registered listeners.
     *
     * @param flag  the flag.
     *
     * @see #getUseFillPaint()
     * @since 1.0.14
     */
    public void setUseFillPaint(boolean flag) {
        this.useFillPaint = flag;
        fireChangeEvent();
    }

    /**
     * Returns the shape used to represent a line in the legend.
     *
     * @return The legend line (never <code>null</code>).
     *
     * @see #setLegendLine(Shape)
     */
    public Shape getLegendLine() {
        return this.legendLine;
    }

    /**
     * Sets the shape used as a line in each legend item and sends a
     * {@link RendererChangeEvent} to all registered listeners.
     *
     * @param line  the line (<code>null</code> not permitted).
     *
     * @see #getLegendLine()
     */
    public void setLegendLine(Shape line) {
        if (line == null) {
            throw new IllegalArgumentException("Null 'line' argument.");
        }
        this.legendLine = line;
        fireChangeEvent();
    }

    /**
     * Adds an entity to the collection.
     *
     * @param entities  the entity collection being populated.
     * @param area  the entity area (if <code>null</code> a default will be
     *              used).
     * @param dataset  the dataset.
     * @param series  the series.
     * @param item  the item.
     * @param entityX  the entity's center x-coordinate in user space (only
     *                 used if <code>area</code> is <code>null</code>).
     * @param entityY  the entity's center y-coordinate in user space (only
     *                 used if <code>area</code> is <code>null</code>).
     */
    // this method was copied from AbstractXYItemRenderer on 03-Oct-2011
    protected void addEntity(EntityCollection entities, Shape area,
                             XYDataset dataset, int series, int item,
                             double entityX, double entityY) {
        if (!getItemCreateEntity(series, item)) {
            return;
        }
        Shape hotspot = area;
        if (hotspot == null) {
            // no explicit area: build a circular hotspot around the point
            double r = getDefaultEntityRadius();
            double w = r * 2;
            if (getPlot().getOrientation() == PlotOrientation.VERTICAL) {
                hotspot = new Ellipse2D.Double(entityX - r, entityY - r, w, w);
            }
            else {
                hotspot = new Ellipse2D.Double(entityY - r, entityX - r, w, w);
            }
        }
        String tip = null;
        XYToolTipGenerator generator = getToolTipGenerator(series, item);
        if (generator != null) {
            tip = generator.generateToolTip(dataset, series, item);
        }
        String url = null;
        if (getURLGenerator() != null) {
            url = getURLGenerator().generateURL(dataset, series, item);
        }
        XYItemEntity entity = new XYItemEntity(hotspot, dataset, series, item,
                tip, url);
        entities.add(entity);
    }

    /**
     * Plots the data for a given series.
     *
     * @param g2  the drawing surface.
     * @param dataArea  the data area.
     * @param info  collects plot rendering info.
     * @param plot  the plot.
     * @param dataset  the dataset.
     * @param seriesIndex  the series index.
*/ @Override public void drawSeries(Graphics2D g2, Rectangle2D dataArea, PlotRenderingInfo info, PolarPlot plot, XYDataset dataset, int seriesIndex) { GeneralPath poly = null; ValueAxis axis = plot.getAxisForDataset(plot.indexOf(dataset)); final int numPoints = dataset.getItemCount(seriesIndex); for (int i = 0; i < numPoints; i++) { double theta = dataset.getXValue(seriesIndex, i); double radius = dataset.getYValue(seriesIndex, i); Point p = plot.translateToJava2D(theta, radius, axis, dataArea); if (poly == null) { poly = new GeneralPath(); poly.moveTo(p.x, p.y); } else { poly.lineTo(p.x, p.y); } } if (getConnectFirstAndLastPoint()) { poly.closePath(); } g2.setPaint(lookupSeriesPaint(seriesIndex)); g2.setStroke(lookupSeriesStroke(seriesIndex)); if (isSeriesFilled(seriesIndex)) { Composite savedComposite = g2.getComposite(); g2.setComposite(this.fillComposite); g2.fill(poly); g2.setComposite(savedComposite); if (this.drawOutlineWhenFilled) { // draw the outline of the filled polygon g2.setPaint(lookupSeriesOutlinePaint(seriesIndex)); g2.draw(poly); } } else { // just the lines, no filling g2.draw(poly); } // draw the item shapes if (this.shapesVisible) { // setup for collecting optional entity info... 
EntityCollection entities = null; if (info != null) { entities = info.getOwner().getEntityCollection(); } PathIterator pi = poly.getPathIterator(null); int i = 0; while (!pi.isDone()) { final float[] coords = new float[6]; final int segType = pi.currentSegment(coords); pi.next(); if (segType != PathIterator.SEG_LINETO && segType != PathIterator.SEG_MOVETO) { continue; } final int x = Math.round(coords[0]); final int y = Math.round(coords[1]); final Shape shape = ShapeUtilities.createTranslatedShape( getItemShape(seriesIndex, i++), x, y); Paint paint; if (useFillPaint) { paint = lookupSeriesFillPaint(seriesIndex); } else { paint = lookupSeriesPaint(seriesIndex); } g2.setPaint(paint); g2.fill(shape); if (isSeriesFilled(seriesIndex) && this.drawOutlineWhenFilled) { g2.setPaint(lookupSeriesOutlinePaint(seriesIndex)); g2.setStroke(lookupSeriesOutlineStroke(seriesIndex)); g2.draw(shape); } // add an entity for the item, but only if it falls within the // data area... if (entities != null && AbstractXYItemRenderer.isPointInRect(dataArea, x, y)) { addEntity(entities, shape, dataset, seriesIndex, i-1, x, y); } } } } /** * Draw the angular gridlines - the spokes. * * @param g2 the drawing surface. * @param plot the plot. * @param ticks the ticks. * @param dataArea the data area. 
*/ @Override public void drawAngularGridLines(Graphics2D g2, PolarPlot plot, List<ValueTick> ticks, Rectangle2D dataArea) { g2.setFont(plot.getAngleLabelFont()); g2.setStroke(plot.getAngleGridlineStroke()); g2.setPaint(plot.getAngleGridlinePaint()); double axisMin = plot.getAxis().getLowerBound(); double maxRadius = plot.getAxis().getUpperBound(); Point center = plot.translateToJava2D(axisMin, axisMin, plot.getAxis(), dataArea); for (ValueTick tick : ticks) { double tickVal = tick.getValue(); Point p = plot.translateToJava2D(tickVal, maxRadius, plot.getAxis(), dataArea); g2.setPaint(plot.getAngleGridlinePaint()); g2.drawLine(center.x, center.y, p.x, p.y); if (plot.isAngleLabelsVisible()) { int x = p.x; int y = p.y; g2.setPaint(plot.getAngleLabelPaint()); TextUtilities.drawAlignedString(tick.getText(), g2, x, y, tick.getTextAnchor()); } } } /** * Draw the radial gridlines - the rings. * * @param g2 the drawing surface. * @param plot the plot. * @param radialAxis the radial axis. * @param ticks the ticks. * @param dataArea the data area. */ @Override public void drawRadialGridLines(Graphics2D g2, PolarPlot plot, ValueAxis radialAxis, List<ValueTick> ticks, Rectangle2D dataArea) { g2.setFont(radialAxis.getTickLabelFont()); g2.setPaint(plot.getRadiusGridlinePaint()); g2.setStroke(plot.getRadiusGridlineStroke()); double axisMin = radialAxis.getLowerBound(); Point center = plot.translateToJava2D(axisMin, axisMin, radialAxis, dataArea); for (ValueTick tick : ticks) { double angleDegrees = plot.isCounterClockwise() ? plot.getAngleOffset() : -plot.getAngleOffset(); Point p = plot.translateToJava2D(angleDegrees, ((NumberTick)tick).getNumber().doubleValue(), radialAxis, dataArea); int r = p.x - center.x; int upperLeftX = center.x - r; int upperLeftY = center.y - r; int d = 2 * r; Ellipse2D ring = new Ellipse2D.Double(upperLeftX, upperLeftY, d, d); g2.setPaint(plot.getRadiusGridlinePaint()); g2.draw(ring); } } /** * Return the legend for the given series. 
     *
     * @param series  the series index.
     *
     * @return The legend item.
     */
    @Override
    public LegendItem getLegendItem(int series) {
        LegendItem result;
        PolarPlot plot = getPlot();
        if (plot == null) {
            return null;
        }
        XYDataset dataset = plot.getDataset(plot.getIndexOf(this));
        if (dataset == null) {
            return null;
        }

        String toolTipText = null;
        if (getLegendItemToolTipGenerator() != null) {
            toolTipText = getLegendItemToolTipGenerator().generateLabel(
                    dataset, series);
        }
        String urlText = null;
        if (getLegendItemURLGenerator() != null) {
            urlText = getLegendItemURLGenerator().generateLabel(dataset,
                    series);
        }

        Comparable seriesKey = dataset.getSeriesKey(series);
        String label = seriesKey.toString();
        String description = label;
        Shape shape = lookupSeriesShape(series);
        Paint paint;
        if (this.useFillPaint) {
            paint = lookupSeriesFillPaint(series);
        }
        else {
            paint = lookupSeriesPaint(series);
        }
        Stroke stroke = lookupSeriesStroke(series);
        Paint outlinePaint = lookupSeriesOutlinePaint(series);
        Stroke outlineStroke = lookupSeriesOutlineStroke(series);
        // the legend shape only gets an outline when the series itself is
        // drawn filled-with-outline
        boolean shapeOutlined = isSeriesFilled(series)
                && this.drawOutlineWhenFilled;
        result = new LegendItem(label, description, toolTipText, urlText,
                getShapesVisible(), shape, /* shapeFilled=*/ true, paint,
                shapeOutlined, outlinePaint, outlineStroke,
                /* lineVisible= */ true, this.legendLine, stroke, paint);
        result.setToolTipText(toolTipText);
        result.setURLText(urlText);
        result.setDataset(dataset);
        result.setSeriesKey(seriesKey);
        result.setSeriesIndex(series);
        return result;
    }

    /**
     * Returns the tooltip generator for the specified series and item,
     * falling back to the base generator when no per-series generator is
     * set.
     *
     * @param series  the series index.
     * @param item  the item index.
     *
     * @return The tooltip generator (possibly <code>null</code>).
     *
     * @since 1.0.14
     */
    @Override
    public XYToolTipGenerator getToolTipGenerator(int series, int item) {
        XYToolTipGenerator generator = this.toolTipGeneratorList.get(series);
        if (generator == null) {
            generator = this.baseToolTipGenerator;
        }
        return generator;
    }

    /**
     * Returns the tool tip generator for the specified series.
     *
     * @param series  the series index.
     *
     * @return The tooltip generator (possibly <code>null</code>).
     *
     * @since 1.0.14
     */
    @Override
    public XYToolTipGenerator getSeriesToolTipGenerator(int series) {
        return this.toolTipGeneratorList.get(series);
    }

    /**
     * Sets the tooltip generator for the specified series and sends a
     * {@link RendererChangeEvent} to all registered listeners.
     *
     * @param series  the series index.
     * @param generator  the generator (<code>null</code> permitted).
     *
     * @since 1.0.14
     */
    @Override
    public void setSeriesToolTipGenerator(int series,
            XYToolTipGenerator generator) {
        this.toolTipGeneratorList.set(series, generator);
        fireChangeEvent();
    }

    /**
     * Returns the base tool tip generator.
     *
     * @return The generator (possibly <code>null</code>).
     *
     * @since 1.0.14
     */
    @Override
    public XYToolTipGenerator getBaseToolTipGenerator() {
        return this.baseToolTipGenerator;
    }

    /**
     * Sets the base tool tip generator and sends a
     * {@link RendererChangeEvent} to all registered listeners.
     *
     * @param generator  the generator (<code>null</code> permitted).
     *
     * @since 1.0.14
     */
    @Override
    public void setBaseToolTipGenerator(XYToolTipGenerator generator) {
        this.baseToolTipGenerator = generator;
        fireChangeEvent();
    }

    /**
     * Returns the URL generator.
     *
     * @return The generator (possibly <code>null</code>).
     *
     * @since 1.0.14
     */
    @Override
    public XYURLGenerator getURLGenerator() {
        return this.urlGenerator;
    }

    /**
     * Sets the URL generator and sends a {@link RendererChangeEvent} to all
     * registered listeners.
     *
     * @param urlGenerator  the generator (<code>null</code> permitted).
     *
     * @since 1.0.14
     */
    @Override
    public void setURLGenerator(XYURLGenerator urlGenerator) {
        this.urlGenerator = urlGenerator;
        fireChangeEvent();
    }

    /**
     * Returns the legend item tool tip generator.
     *
     * @return The tool tip generator (possibly <code>null</code>).
     *
     * @see #setLegendItemToolTipGenerator(XYSeriesLabelGenerator)
     * @since 1.0.14
     */
    public XYSeriesLabelGenerator getLegendItemToolTipGenerator() {
        return this.legendItemToolTipGenerator;
    }

    /**
     * Sets the legend item tool tip generator and sends a
     * {@link RendererChangeEvent} to all registered listeners.
     *
     * @param generator  the generator (<code>null</code> permitted).
     *
     * @see #getLegendItemToolTipGenerator()
     * @since 1.0.14
     */
    public void setLegendItemToolTipGenerator(
            XYSeriesLabelGenerator generator) {
        this.legendItemToolTipGenerator = generator;
        fireChangeEvent();
    }

    /**
     * Returns the legend item URL generator.
     *
     * @return The URL generator (possibly <code>null</code>).
     *
     * @see #setLegendItemURLGenerator(XYSeriesLabelGenerator)
     * @since 1.0.14
     */
    public XYSeriesLabelGenerator getLegendItemURLGenerator() {
        return this.legendItemURLGenerator;
    }

    /**
     * Sets the legend item URL generator and sends a
     * {@link RendererChangeEvent} to all registered listeners.
     *
     * @param generator  the generator (<code>null</code> permitted).
     *
     * @see #getLegendItemURLGenerator()
     * @since 1.0.14
     */
    public void setLegendItemURLGenerator(XYSeriesLabelGenerator generator) {
        this.legendItemURLGenerator = generator;
        fireChangeEvent();
    }

    /**
     * Tests this renderer for equality with an arbitrary object.
     *
     * @param obj  the object (<code>null</code> not permitted).
     *
     * @return <code>true</code> if this renderer is equal to <code>obj</code>,
     *     and <code>false</code> otherwise.
     */
    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (!(obj instanceof DefaultPolarItemRenderer)) {
            return false;
        }
        DefaultPolarItemRenderer that = (DefaultPolarItemRenderer) obj;
        if (!this.seriesFilled.equals(that.seriesFilled)) {
            return false;
        }
        if (this.drawOutlineWhenFilled != that.drawOutlineWhenFilled) {
            return false;
        }
        if (!ObjectUtilities.equal(this.fillComposite, that.fillComposite)) {
            return false;
        }
        if (this.useFillPaint != that.useFillPaint) {
            return false;
        }
        if (!ShapeUtilities.equal(this.legendLine, that.legendLine)) {
            return false;
        }
        if (this.shapesVisible != that.shapesVisible) {
            return false;
        }
        if (this.connectFirstAndLastPoint != that.connectFirstAndLastPoint) {
            return false;
        }
        if (!this.toolTipGeneratorList.equals(that.toolTipGeneratorList)) {
            return false;
        }
        if (!ObjectUtilities.equal(this.baseToolTipGenerator,
                that.baseToolTipGenerator)) {
            return false;
        }
        if (!ObjectUtilities.equal(this.urlGenerator, that.urlGenerator)) {
            return false;
        }
        if (!ObjectUtilities.equal(this.legendItemToolTipGenerator,
                that.legendItemToolTipGenerator)) {
            return false;
        }
        if (!ObjectUtilities.equal(this.legendItemURLGenerator,
                that.legendItemURLGenerator)) {
            return false;
        }
        // delegate the remaining state comparison to AbstractRenderer
        return super.equals(obj);
    }

    /**
     * Returns a clone of the renderer.
     *
     * @return A clone.
     *
     * @throws CloneNotSupportedException if the renderer cannot be cloned.
     */
    @Override
    public Object clone() throws CloneNotSupportedException {
        DefaultPolarItemRenderer clone
                = (DefaultPolarItemRenderer) super.clone();
        if (this.legendLine != null) {
            clone.legendLine = ShapeUtilities.clone(this.legendLine);
        }
        clone.seriesFilled = (BooleanList) this.seriesFilled.clone();
        // shallow copy of the list structure; the generators themselves are
        // shared unless they implement PublicCloneable (handled below for
        // the base generator only)
        clone.toolTipGeneratorList
                = (ObjectList<XYToolTipGenerator>) this.toolTipGeneratorList.clone();
        if (clone.baseToolTipGenerator instanceof PublicCloneable) {
            clone.baseToolTipGenerator
                    = ObjectUtilities.clone(this.baseToolTipGenerator);
        }
        if (clone.urlGenerator instanceof PublicCloneable) {
            clone.urlGenerator = ObjectUtilities.clone(this.urlGenerator);
        }
        if (clone.legendItemToolTipGenerator instanceof PublicCloneable) {
            clone.legendItemToolTipGenerator
                    = ObjectUtilities.clone(this.legendItemToolTipGenerator);
        }
        if (clone.legendItemURLGenerator instanceof PublicCloneable) {
            clone.legendItemURLGenerator
                    = ObjectUtilities.clone(this.legendItemURLGenerator);
        }
        return clone;
    }

    /**
     * Provides serialization support.
     *
     * @param stream  the input stream.
     *
     * @throws IOException  if there is an I/O error.
     * @throws ClassNotFoundException  if there is a classpath problem.
     */
    private void readObject(ObjectInputStream stream)
            throws IOException, ClassNotFoundException {
        stream.defaultReadObject();
        // transient fields restored explicitly
        this.legendLine = SerialUtilities.readShape(stream);
        this.fillComposite = SerialUtilities.readComposite(stream);
    }

    /**
     * Provides serialization support.
     *
     * @param stream  the output stream.
     *
     * @throws IOException  if there is an I/O error.
     */
    private void writeObject(ObjectOutputStream stream) throws IOException {
        stream.defaultWriteObject();
        // transient fields written explicitly
        SerialUtilities.writeShape(this.legendLine, stream);
        SerialUtilities.writeComposite(this.fillComposite, stream);
    }

}
Fix rendering bug when axis is inverted.
src/main/java/org/jfree/chart/renderer/DefaultPolarItemRenderer.java
Fix rendering bug when axis is inverted.
Java
lgpl-2.1
a496ec966689913bec6200f8e225bd3d0ff77a99
0
celements/celements-core,celements/celements-core
/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package com.celements.web.plugin.cmd; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.xwiki.context.Execution; import org.xwiki.model.reference.DocumentReference; import com.celements.navigation.TreeNode; import com.celements.navigation.service.ITreeNodeService; import com.xpn.xwiki.XWikiContext; import com.xpn.xwiki.XWikiException; import com.xpn.xwiki.doc.XWikiDocument; import com.xpn.xwiki.web.Utils; public class EmptyCheckCommand { private static Log LOGGER = LogFactory.getFactory().getInstance( EmptyCheckCommand.class); private Set<DocumentReference> visitedDocRefs; ITreeNodeService treeNodeService; public DocumentReference getNextNonEmptyChildren(DocumentReference documentRef) { visitedDocRefs = new HashSet<DocumentReference>(); return getNextNonEmptyChildren_internal(documentRef); } private DocumentReference getNextNonEmptyChildren_internal( DocumentReference documentRef) { if (isEmptyRTEDocument(documentRef)) { List<TreeNode> children = getTreeNodeService().getSubNodesForParent( 
getFullNameForRef(documentRef), getSpaceName(documentRef), ""); if (children.size() > 0) { visitedDocRefs.add(documentRef); DocumentReference nextChild = children.get(0).getDocumentReference(); if (!visitedDocRefs.contains(nextChild)) { return getNextNonEmptyChildren_internal(nextChild); } else { LOGGER.warn("getNextNonEmptyChildren_internal: recursion detected on [" + nextChild + "]."); } } } return documentRef; } private String getFullNameForRef(DocumentReference docRef) { return getSpaceName(docRef) + "." + docRef.getName(); } private String getSpaceName(DocumentReference docRef) { return docRef.getLastSpaceReference().getName(); } /** * @deprecated since 2.9.4 use instead isEmptyRTEDocument(DocumentReference) **/ @Deprecated public boolean isEmptyRTEDocument(String fullname, XWikiContext context) { DocumentReference docRef = new DocumentReference(context.getDatabase(), fullname.split("\\.")[0], fullname.split("\\.")[1]); return isEmptyRTEDocumentDefault(docRef, context) && isEmptyRTEDocumentTranslated(docRef); } public boolean isEmptyRTEDocument(DocumentReference docRef) { return isEmptyRTEDocumentDefault(docRef, getContext()) && isEmptyRTEDocumentTranslated(docRef); } public boolean isEmptyRTEDocumentTranslated(DocumentReference docRef) { try { return isEmptyRTEDocument(getContext().getWiki( ).getDocument(docRef, getContext()).getTranslatedDocument(getContext())); } catch (XWikiException e) { LOGGER.error(e); } return true; } public boolean isEmptyRTEDocumentDefault(DocumentReference docRef, XWikiContext context) { try { return isEmptyRTEDocument(context.getWiki( ).getDocument(docRef, context)); } catch (XWikiException e) { LOGGER.error(e); } return true; } public boolean isEmptyRTEDocument(XWikiDocument localdoc) { return isEmptyRTEString(localdoc.getContent()); } public boolean isEmptyRTEString(String rteContent) { return "".equals(rteContent.replaceAll( "(<p>)?(<span.*?>)?(\\s*(&nbsp;|<br\\s*/>))*\\s*(</span>)?(</p>)?", "").trim()); } private XWikiContext 
getContext() { return (XWikiContext)Utils.getComponent(Execution.class).getContext().getProperty( "xwikicontext"); } ITreeNodeService getTreeNodeService() { if (treeNodeService != null) { return treeNodeService; } return Utils.getComponent(ITreeNodeService.class); } }
src/main/java/com/celements/web/plugin/cmd/EmptyCheckCommand.java
/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package com.celements.web.plugin.cmd; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.xwiki.context.Execution; import org.xwiki.model.reference.DocumentReference; import com.celements.navigation.TreeNode; import com.celements.navigation.service.ITreeNodeService; import com.xpn.xwiki.XWikiContext; import com.xpn.xwiki.XWikiException; import com.xpn.xwiki.doc.XWikiDocument; import com.xpn.xwiki.web.Utils; public class EmptyCheckCommand { private static Log mLogger = LogFactory.getFactory().getInstance( EmptyCheckCommand.class); private Set<DocumentReference> visitedDocRefs; ITreeNodeService treeNodeService; public DocumentReference getNextNonEmptyChildren(DocumentReference documentRef) { visitedDocRefs = new HashSet<DocumentReference>(); return getNextNonEmptyChildren_internal(documentRef); } private DocumentReference getNextNonEmptyChildren_internal( DocumentReference documentRef) { if (isEmptyRTEDocument(documentRef)) { List<TreeNode> children = getTreeNodeService().getSubNodesForParent( 
getFullNameForRef(documentRef), getSpaceName(documentRef), ""); if (children.size() > 0) { visitedDocRefs.add(documentRef); DocumentReference nextChild = children.get(0).getDocumentReference(); if (!visitedDocRefs.contains(nextChild)) { return getNextNonEmptyChildren_internal(nextChild); } else { mLogger.warn("getNextNonEmptyChildren_internal: recursion detected on [" + nextChild + "]."); } } } return documentRef; } private String getFullNameForRef(DocumentReference docRef) { return getSpaceName(docRef) + "." + docRef.getName(); } private String getSpaceName(DocumentReference docRef) { return docRef.getLastSpaceReference().getName(); } /** * @deprecated since 2.9.4 use instead isEmptyRTEDocument(DocumentReference) **/ @Deprecated public boolean isEmptyRTEDocument(String fullname, XWikiContext context) { DocumentReference docRef = new DocumentReference(context.getDatabase(), fullname.split("\\.")[0], fullname.split("\\.")[1]); return isEmptyRTEDocumentDefault(docRef, context) && isEmptyRTEDocumentTranslated(docRef); } public boolean isEmptyRTEDocument(DocumentReference docRef) { return isEmptyRTEDocumentDefault(docRef, getContext()) && isEmptyRTEDocumentTranslated(docRef); } public boolean isEmptyRTEDocumentTranslated(DocumentReference docRef) { try { return isEmptyRTEDocument(getContext().getWiki( ).getDocument(docRef, getContext()).getTranslatedDocument(getContext())); } catch (XWikiException e) { mLogger.error(e); } return true; } public boolean isEmptyRTEDocumentDefault(DocumentReference docRef, XWikiContext context) { try { return isEmptyRTEDocument(context.getWiki( ).getDocument(docRef, context)); } catch (XWikiException e) { mLogger.error(e); } return true; } public boolean isEmptyRTEDocument(XWikiDocument localdoc) { return isEmptyRTEString(localdoc.getContent()); } public boolean isEmptyRTEString(String rteContent) { return "".equals(rteContent.replaceAll( "(<p>)?(<span.*?>)?(\\s*(&nbsp;|<br\\s*/>))*\\s*(</span>)?(</p>)?", "").trim()); } private 
XWikiContext getContext() { return (XWikiContext)Utils.getComponent(Execution.class).getContext().getProperty( "xwikicontext"); } ITreeNodeService getTreeNodeService() { if (treeNodeService != null) { return treeNodeService; } return Utils.getComponent(ITreeNodeService.class); } }
rename mLogger to LOGGER in EmptyCheckCommand
src/main/java/com/celements/web/plugin/cmd/EmptyCheckCommand.java
rename mLogger to LOGGER in EmptyCheckCommand
Java
apache-2.0
7c0fad434eea34fba4241340bd81f9fe73e114bf
0
GoogleCloudPlatform/java-docs-samples,GoogleCloudPlatform/java-docs-samples,GoogleCloudPlatform/java-docs-samples,GoogleCloudPlatform/java-docs-samples,GoogleCloudPlatform/java-docs-samples,GoogleCloudPlatform/java-docs-samples
/* * Copyright 2018 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.dialogflow; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertEquals; import com.google.cloud.dialogflow.v2beta1.DeleteDocumentRequest; import com.google.cloud.dialogflow.v2beta1.Document; import com.google.cloud.dialogflow.v2beta1.DocumentName; import com.google.cloud.dialogflow.v2beta1.DocumentsClient; import com.google.cloud.dialogflow.v2beta1.KnowledgeAnswers; import com.google.cloud.dialogflow.v2beta1.KnowledgeAnswers.Answer; import com.google.cloud.dialogflow.v2beta1.KnowledgeBase; import com.google.cloud.dialogflow.v2beta1.KnowledgeBaseName; import com.google.cloud.dialogflow.v2beta1.KnowledgeBasesClient; import com.google.cloud.dialogflow.v2beta1.ProjectName; import com.google.common.collect.ImmutableList; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import java.util.List; import java.util.Map; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Integration (system) tests for {DetectIntentKnowledge, KnowledgeManagement, DocumentManagement}. 
*/ @RunWith(JUnit4.class) @SuppressWarnings("checkstyle:abbreviationaswordinname") public class KnowledgeBaseManagementIT { private static String PROJECT_ID = System.getenv().get("GOOGLE_CLOUD_PROJECT"); private static String TEST_KNOWLEDGE_BASE_ID = "MTA4MzE0ODY5NTczMTQzNzU2ODA"; private static String TEST_DOCUMENT_ID = "MTUwNjk0ODg1NTU4NzkzMDExMg"; private static String SESSION_ID = "fake_session_for_testing"; private static String LANGUAGE_CODE = "en-US"; private static String KNOWLEDGE_BASE_NAME = "fake_knowledge_base_name"; private static String DOCUMENT_BASE_NAME = "fake_document_name"; private static List<String> TEXTS = ImmutableList .of("How do I sign up?", "Is my data redundant?", "Where can I find pricing information?", "Where is my data stored?", "What are my support options?", "How can I maximize the availability of my data?"); @Before public void setUp() { System.setOut(new PrintStream(new ByteArrayOutputStream())); } // If any knowledge base/documents remain after test complete, delete them. @After public void tearDown() throws Exception { try (KnowledgeBasesClient knowledgeBasesClient = KnowledgeBasesClient.create()) { try (DocumentsClient documentsClient = DocumentsClient.create()) { ProjectName projectName = ProjectName.of(PROJECT_ID); for (KnowledgeBase knowledgeBase : knowledgeBasesClient.listKnowledgeBases(projectName).iterateAll()) { // DO NOT DELETE THE TEST KNOWLEDGE BASE if (!knowledgeBase.getName().contains(TEST_KNOWLEDGE_BASE_ID)) { // Delete any documents in the knowledge base. 
for (Document document : documentsClient.listDocuments( knowledgeBase.getName()).iterateAll()) { // DO NOT DELETE THE TEST DOCUMENT if (!document.getName().contains(TEST_DOCUMENT_ID)) { documentsClient.deleteDocumentCallable().call( DeleteDocumentRequest.newBuilder().setName(document.getName()).build()); } } knowledgeBasesClient.deleteKnowledgeBase(knowledgeBase.getName()); } } } } System.setOut(null); } @Test public void testKnowledgeBase() throws Exception { // Check the knowledge base does not yet exist List<KnowledgeBase> knowledgeBases = KnowledgeBaseManagement.listKnowledgeBases(PROJECT_ID); assertEquals(1, knowledgeBases.size()); // Create a Knowledge Base KnowledgeBase knowledgeBase = KnowledgeBaseManagement.createKnowledgeBase(PROJECT_ID, KNOWLEDGE_BASE_NAME); assertEquals(knowledgeBase.getDisplayName(), KNOWLEDGE_BASE_NAME); // Get KnowledgeBase knowledgeBase = KnowledgeBaseManagement.getKnowledgeBase(knowledgeBase.getName()); assertEquals(knowledgeBase.getDisplayName(), KNOWLEDGE_BASE_NAME); // List Knowledge Bases knowledgeBases = KnowledgeBaseManagement.listKnowledgeBases(PROJECT_ID); assertEquals(2, knowledgeBases.size()); int found = 0; for (KnowledgeBase knowledgeBase1 : knowledgeBases) { if (knowledgeBase1.getDisplayName().equals(KNOWLEDGE_BASE_NAME)) { found += 1; } } assertEquals(1, found); // Delete the Knowledge Base KnowledgeBaseManagement.deleteKnowledgeBase(knowledgeBase.getName()); // List Knowledge Bases (ensure delete success) knowledgeBases = KnowledgeBaseManagement.listKnowledgeBases(PROJECT_ID); assertEquals(1, knowledgeBases.size()); } @Test public void testDocumentManagement() throws Exception { // Create a Knowledge Base KnowledgeBase knowledgeBase = KnowledgeBaseManagement.createKnowledgeBase(PROJECT_ID, KNOWLEDGE_BASE_NAME); String knowledgeBaseName = knowledgeBase.getName(); // Create a Document Document document = DocumentManagement.createDocument( knowledgeBaseName, DOCUMENT_BASE_NAME, "text/html", "FAQ", 
"https://cloud.google.com/storage/docs/faq"); assertEquals(DOCUMENT_BASE_NAME, document.getDisplayName()); // List the Documents List<Document> documents = DocumentManagement.listDocuments(knowledgeBaseName); assertEquals(1, documents.size()); assertEquals(DOCUMENT_BASE_NAME, documents.get(0).getDisplayName()); // Get the Document document = DocumentManagement.getDocument(document.getName()); assertEquals(DOCUMENT_BASE_NAME, document.getDisplayName()); // Delete the Document DocumentManagement.deleteDocument(document.getName()); // List the Document documents = DocumentManagement.listDocuments(knowledgeBaseName); assertEquals(0, documents.size()); } @Test public void testDetectIntentKnowledge() throws Exception { KnowledgeBaseName knowledgeBaseName = KnowledgeBaseName.newBuilder() .setProject(PROJECT_ID).setKnowledgeBase(TEST_KNOWLEDGE_BASE_ID).build(); DocumentName documentName = DocumentName.newBuilder() .setProject(PROJECT_ID) .setKnowledgeBase(TEST_KNOWLEDGE_BASE_ID) .setDocument(TEST_DOCUMENT_ID) .build(); Map<String, KnowledgeAnswers> allAnswers = DetectIntentKnowledge.detectIntentKnowledge( PROJECT_ID, knowledgeBaseName.toString(), SESSION_ID, LANGUAGE_CODE, TEXTS); assertEquals(TEXTS.size(), allAnswers.size()); int answersFound = 0; for (String text : TEXTS) { KnowledgeAnswers knowledgeAnswers = allAnswers.get(text); if (knowledgeAnswers.getAnswersCount() > 0) { Answer answer = knowledgeAnswers.getAnswers(0); if (text.equals(answer.getFaqQuestion()) && documentName.toString().equals(answer.getSource())) { answersFound++; } } } // To make the test less flaky, check that half of the texts got a result. assertThat(answersFound).isGreaterThan(TEXTS.size() / 2); } }
dialogflow/snippets/src/test/java/com/example/dialogflow/KnowledgeBaseManagementIT.java
/* * Copyright 2018 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.dialogflow; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertEquals; import com.google.cloud.dialogflow.v2beta1.DeleteDocumentRequest; import com.google.cloud.dialogflow.v2beta1.Document; import com.google.cloud.dialogflow.v2beta1.DocumentName; import com.google.cloud.dialogflow.v2beta1.DocumentsClient; import com.google.cloud.dialogflow.v2beta1.KnowledgeAnswers; import com.google.cloud.dialogflow.v2beta1.KnowledgeAnswers.Answer; import com.google.cloud.dialogflow.v2beta1.KnowledgeBase; import com.google.cloud.dialogflow.v2beta1.KnowledgeBaseName; import com.google.cloud.dialogflow.v2beta1.KnowledgeBasesClient; import com.google.cloud.dialogflow.v2beta1.ProjectName; import com.google.common.collect.ImmutableList; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import java.util.List; import java.util.Map; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Integration (system) tests for {DetectIntentKnowledge, KnowledgeManagement, DocumentManagement}. 
*/ @RunWith(JUnit4.class) @SuppressWarnings("checkstyle:abbreviationaswordinname") public class KnowledgeBaseManagementIT { private static String PROJECT_ID = System.getenv().get("GOOGLE_CLOUD_PROJECT"); private static String TEST_KNOWLEDGE_BASE_ID = "MTUwMTg2NzM1MjY0OTAwMDU1MDQ"; private static String TEST_DOCUMENT_ID = "MTE4MTI3OTY2ODcwNTc5NDQ1NzY"; private static String SESSION_ID = "fake_session_for_testing"; private static String LANGUAGE_CODE = "en-US"; private static String KNOWLEDGE_BASE_NAME = "fake_knowledge_base_name"; private static String DOCUMENT_BASE_NAME = "fake_document_name"; private static List<String> TEXTS = ImmutableList .of("How do I sign up?", "Is my data redundant?", "Where can I find pricing information?", "Where is my data stored?", "What are my support options?", "How can I maximize the availability of my data?"); @Before public void setUp() { System.setOut(new PrintStream(new ByteArrayOutputStream())); } // If any knowledge base/documents remain after test complete, delete them. @After public void tearDown() throws Exception { try (KnowledgeBasesClient knowledgeBasesClient = KnowledgeBasesClient.create()) { try (DocumentsClient documentsClient = DocumentsClient.create()) { ProjectName projectName = ProjectName.of(PROJECT_ID); for (KnowledgeBase knowledgeBase : knowledgeBasesClient.listKnowledgeBases(projectName).iterateAll()) { // DO NOT DELETE THE TEST KNOWLEDGE BASE if (!knowledgeBase.getName().contains(TEST_KNOWLEDGE_BASE_ID)) { // Delete any documents in the knowledge base. 
for (Document document : documentsClient.listDocuments( knowledgeBase.getName()).iterateAll()) { // DO NOT DELETE THE TEST DOCUMENT if (!document.getName().contains(TEST_DOCUMENT_ID)) { documentsClient.deleteDocumentCallable().call( DeleteDocumentRequest.newBuilder().setName(document.getName()).build()); } } knowledgeBasesClient.deleteKnowledgeBase(knowledgeBase.getName()); } } } } System.setOut(null); } @Test public void testKnowledgeBase() throws Exception { // Check the knowledge base does not yet exist List<KnowledgeBase> knowledgeBases = KnowledgeBaseManagement.listKnowledgeBases(PROJECT_ID); assertEquals(1, knowledgeBases.size()); // Create a Knowledge Base KnowledgeBase knowledgeBase = KnowledgeBaseManagement.createKnowledgeBase(PROJECT_ID, KNOWLEDGE_BASE_NAME); assertEquals(knowledgeBase.getDisplayName(), KNOWLEDGE_BASE_NAME); // Get KnowledgeBase knowledgeBase = KnowledgeBaseManagement.getKnowledgeBase(knowledgeBase.getName()); assertEquals(knowledgeBase.getDisplayName(), KNOWLEDGE_BASE_NAME); // List Knowledge Bases knowledgeBases = KnowledgeBaseManagement.listKnowledgeBases(PROJECT_ID); assertEquals(2, knowledgeBases.size()); int found = 0; for (KnowledgeBase knowledgeBase1 : knowledgeBases) { if (knowledgeBase1.getDisplayName().equals(KNOWLEDGE_BASE_NAME)) { found += 1; } } assertEquals(1, found); // Delete the Knowledge Base KnowledgeBaseManagement.deleteKnowledgeBase(knowledgeBase.getName()); // List Knowledge Bases (ensure delete success) knowledgeBases = KnowledgeBaseManagement.listKnowledgeBases(PROJECT_ID); assertEquals(1, knowledgeBases.size()); } @Test public void testDocumentManagement() throws Exception { // Create a Knowledge Base KnowledgeBase knowledgeBase = KnowledgeBaseManagement.createKnowledgeBase(PROJECT_ID, KNOWLEDGE_BASE_NAME); String knowledgeBaseName = knowledgeBase.getName(); // Create a Document Document document = DocumentManagement.createDocument( knowledgeBaseName, DOCUMENT_BASE_NAME, "text/html", "FAQ", 
"https://cloud.google.com/storage/docs/faq"); assertEquals(DOCUMENT_BASE_NAME, document.getDisplayName()); // List the Documents List<Document> documents = DocumentManagement.listDocuments(knowledgeBaseName); assertEquals(1, documents.size()); assertEquals(DOCUMENT_BASE_NAME, documents.get(0).getDisplayName()); // Get the Document document = DocumentManagement.getDocument(document.getName()); assertEquals(DOCUMENT_BASE_NAME, document.getDisplayName()); // Delete the Document DocumentManagement.deleteDocument(document.getName()); // List the Document documents = DocumentManagement.listDocuments(knowledgeBaseName); assertEquals(0, documents.size()); } @Test public void testDetectIntentKnowledge() throws Exception { KnowledgeBaseName knowledgeBaseName = KnowledgeBaseName.newBuilder() .setProject(PROJECT_ID).setKnowledgeBase(TEST_KNOWLEDGE_BASE_ID).build(); DocumentName documentName = DocumentName.newBuilder() .setProject(PROJECT_ID) .setKnowledgeBase(TEST_KNOWLEDGE_BASE_ID) .setDocument(TEST_DOCUMENT_ID) .build(); Map<String, KnowledgeAnswers> allAnswers = DetectIntentKnowledge.detectIntentKnowledge( PROJECT_ID, knowledgeBaseName.toString(), SESSION_ID, LANGUAGE_CODE, TEXTS); assertEquals(TEXTS.size(), allAnswers.size()); int answersFound = 0; for (String text : TEXTS) { KnowledgeAnswers knowledgeAnswers = allAnswers.get(text); if (knowledgeAnswers.getAnswersCount() > 0) { Answer answer = knowledgeAnswers.getAnswers(0); if (text.equals(answer.getFaqQuestion()) && documentName.toString().equals(answer.getSource())) { answersFound++; } } } // To make the test less flaky, check that half of the texts got a result. assertThat(answersFound).isGreaterThan(TEXTS.size() / 2); } }
samples: Update KnowledgeBaseId and update rouge SA roles (#1553)
dialogflow/snippets/src/test/java/com/example/dialogflow/KnowledgeBaseManagementIT.java
samples: Update KnowledgeBaseId and update rouge SA roles (#1553)
Java
apache-2.0
9bf791b1feefbb33223f375650a06c0757fae7f6
0
PathVisio/pathvisio,PathVisio/pathvisio,markwoon/pathvisio,markwoon/pathvisio,PathVisio/pathvisio,markwoon/pathvisio,markwoon/pathvisio,PathVisio/pathvisio
// PathVisio, // a tool for data visualization and analysis using Biological Pathways // Copyright 2006-2009 BiGCaT Bioinformatics // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package org.pathvisio.gui.swing; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Component; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.swing.Action; import javax.swing.ActionMap; import javax.swing.ImageIcon; import javax.swing.InputMap; import javax.swing.JButton; import javax.swing.JComboBox; import javax.swing.JLabel; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import javax.swing.JTabbedPane; import javax.swing.JTable; import javax.swing.JToolBar; import javax.swing.KeyStroke; import javax.swing.event.HyperlinkEvent; import javax.swing.event.HyperlinkListener; import javax.swing.table.TableCellEditor; import javax.swing.table.TableCellRenderer; import org.pathvisio.ApplicationEvent; import org.pathvisio.Engine.ApplicationEventListener; import org.pathvisio.debug.Logger; import org.pathvisio.gui.BackpageTextProvider; import org.pathvisio.gui.BackpageTextProvider.BackpageAttributes; import org.pathvisio.gui.BackpageTextProvider.BackpageXrefs; 
import org.pathvisio.gui.swing.CommonActions.ZoomAction; import org.pathvisio.gui.swing.dialogs.PathwayElementDialog; import org.pathvisio.gui.swing.dnd.PathwayImportHandler; import org.pathvisio.gui.swing.propertypanel.PathwayTableModel; import org.pathvisio.model.PathwayElement; import org.pathvisio.util.Resources; import org.pathvisio.view.Graphics; import org.pathvisio.view.Handle; import org.pathvisio.view.SelectionBox; import org.pathvisio.view.VPathway; import org.pathvisio.view.VPathwayElement; import org.pathvisio.view.VPathwayEvent; import org.pathvisio.view.VPathwayListener; import com.mammothsoftware.frwk.ddb.DropDownButton; /** * this is the contents of the main window in the WikiPathways applet, * and contains the editor window, side panels, toolbar and menu. * * For the standalone application, the derived class MainPanelStandalone is used. */ public class MainPanel extends JPanel implements VPathwayListener, ApplicationEventListener { private JSplitPane splitPane; protected JToolBar toolBar; private JScrollPane pathwayScrollPane; private JScrollPane propertiesScrollPane; protected JTabbedPane sidebarTabbedPane; protected JMenuBar menuBar; private JTable propertyTable; protected BackpagePane backpagePane; protected BackpageTextProvider bpt; protected CommonActions actions; private final PathwayTableModel model; Set<Action> hideActions; protected SwingEngine swingEngine; private final PathwayElementMenuListener pathwayElementMenuListener; public PathwayTableModel getModel(){ return model; } public PathwayElementMenuListener getPathwayElementMenuListener() { return pathwayElementMenuListener; } private boolean mayAddAction(Action a) { return hideActions == null || !hideActions.contains(a); } protected void addMenuActions(JMenuBar mb) { JMenu fileMenu = new JMenu("File"); addToMenu(actions.saveAction, fileMenu); addToMenu(actions.saveAsAction, fileMenu); fileMenu.addSeparator(); addToMenu(actions.importAction, fileMenu); addToMenu(actions.exportAction, 
fileMenu); fileMenu.addSeparator(); addToMenu(actions.exitAction, fileMenu); JMenu editMenu = new JMenu("Edit"); addToMenu(actions.undoAction, editMenu); addToMenu(actions.copyAction, editMenu); addToMenu(actions.pasteAction, editMenu); editMenu.addSeparator(); JMenu selectionMenu = new JMenu("Selection"); for(Action a : actions.layoutActions) addToMenu(a, selectionMenu); editMenu.add (selectionMenu); JMenu viewMenu = new JMenu("View"); JMenu zoomMenu = new JMenu("Zoom"); viewMenu.add(zoomMenu); for(Action a : actions.zoomActions) addToMenu(a, zoomMenu); JMenu helpMenu = new JMenu("Help"); mb.add(fileMenu); mb.add(editMenu); mb.add(viewMenu); mb.add(helpMenu); } /** * Constructor for this class. Creates the main panel of this application, containing * the main GUI elements (menubar, toolbar, sidepanel, drawing pane). Actions that should * not be added to the menubar and toolbar should be specified in the hideActions parameter * @param hideActions The {@link Action}s that should not be added to the toolbar and menubar */ public MainPanel(SwingEngine swingEngine, Set<Action> hideActions) { this.hideActions = hideActions; this.swingEngine = swingEngine; pathwayElementMenuListener = new PathwayElementMenuListener(swingEngine); model = new PathwayTableModel(swingEngine); } public void createAndShowGUI() { setLayout(new BorderLayout()); setTransferHandler(new PathwayImportHandler()); swingEngine.getEngine().addApplicationEventListener(this); actions = swingEngine.getActions(); toolBar = new JToolBar(); toolBar.setFloatable(false); // disable floatable toolbar, aka Abomination of interaction design. addToolBarActions(swingEngine, toolBar); add(toolBar, BorderLayout.PAGE_START); // menuBar will be added by container (JFrame or JApplet) pathwayScrollPane = new JScrollPane(); // set background color when no VPathway is loaded, override l&f because it is usually white. 
pathwayScrollPane.getViewport().setBackground(Color.LIGHT_GRAY); propertyTable = new JTable(model) { public TableCellRenderer getCellRenderer(int row, int column) { TableCellRenderer r = model.getCellRenderer(row, column); return r == null ? super.getCellRenderer(row, column) : r; } public TableCellEditor getCellEditor(int row, int column) { TableCellEditor e = model.getCellEditor(row, column); return e == null ? super.getCellEditor(row, column) : e; } }; //TODO: make this prettier, it's not good for the tablemodel to have //a reference to the table. Quick fix for preventing TableCellEditor //to remain open upon selecting a new PathwayElement model.setTable(propertyTable); propertiesScrollPane = new JScrollPane(propertyTable); bpt = new BackpageTextProvider (); bpt.addBackpageHook(new BackpageAttributes(swingEngine.getGdbManager().getCurrentGdb())); bpt.addBackpageHook(new BackpageXrefs(swingEngine.getGdbManager().getCurrentGdb())); backpagePane = new BackpagePane(bpt, swingEngine.getEngine()); backpagePane.addHyperlinkListener(new HyperlinkListener() { public void hyperlinkUpdate(HyperlinkEvent e) { if(e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) { try { MainPanel.this.swingEngine.openUrl(e.getURL()); } catch(UnsupportedOperationException ex) { Logger.log.error("Unable to open URL", ex); JOptionPane.showMessageDialog( MainPanel.this, "No browser launcher specified", "Unable to open link", JOptionPane.ERROR_MESSAGE ); } } } }); sidebarTabbedPane = new JTabbedPane(); sidebarTabbedPane.addTab( "Properties", propertiesScrollPane ); sidebarTabbedPane.addTab( "Backpage", new JScrollPane(backpagePane) ); splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, pathwayScrollPane, sidebarTabbedPane); splitPane.setResizeWeight(1); splitPane.setOneTouchExpandable(true); add(splitPane, BorderLayout.CENTER); Action[] keyStrokeActions = new Action[] { actions.copyAction, actions.pasteAction, }; InputMap im = getInputMap(); ActionMap am = getActionMap(); for(Action a : 
keyStrokeActions) { im.put((KeyStroke)a.getValue(Action.ACCELERATOR_KEY), a.getValue(Action.NAME)); am.put(a.getValue(Action.NAME), a); } menuBar = new JMenuBar(); addMenuActions(menuBar); } /** * Constructor for this class. Creates the main panel of this application, containing * the main GUI elements (menubar, toolbar, sidepanel, drawing pane). */ public MainPanel(SwingEngine swingEngine) { this(swingEngine, null); } /** * {@link ActionListener} for the Zoom combobox on the toolbar. The user can select one * of the predefined ZoomActions (50%, 100%, 200%, Zoom to fit, etc.), * or enter a number or percentage manually. */ protected class ZoomComboListener implements ActionListener { public void actionPerformed(ActionEvent e){ JComboBox combo = (JComboBox) e.getSource(); Object s = combo.getSelectedItem(); if (s instanceof Action) { ((Action) s).actionPerformed(e); } else if (s instanceof String) { String zs = (String) s; zs=zs.replace("%",""); try { double zf = Double.parseDouble(zs); ZoomAction za = new ZoomAction(swingEngine.getEngine(), zf); za.setEnabled(true); za.actionPerformed(e); } catch (Exception ex) { // Ignore bad input } } } } protected void addToolBarActions(final SwingEngine swingEngine, JToolBar tb) { tb.setLayout(new WrapLayout(1, 1)); addToToolbar(actions.importAction); addToToolbar(actions.exportAction); tb.addSeparator(); addToToolbar(actions.copyAction); addToToolbar(actions.pasteAction); tb.addSeparator(); addToToolbar(actions.undoAction); tb.addSeparator(); addToToolbar(new JLabel("Zoom:", JLabel.LEFT)); JComboBox combo = new JComboBox(actions.zoomActions); combo.setMaximumSize(combo.getPreferredSize()); combo.setEditable(true); combo.setSelectedIndex(5); // 100% combo.addActionListener(new ZoomComboListener()); addToToolbar(combo, TB_GROUP_SHOW_IF_VPATHWAY); tb.addSeparator(); String submenu = "line"; for(Action[] aa : actions.newElementActions) { if(aa.length == 1) { addToToolbar(aa[0]); } else { //This is the line/receptor sub-menu String 
icon = "newlinemenu.gif"; String tooltip = "Select a line to draw"; if(submenu.equals("receptors")) { //Next one is receptors icon = "newlineshapemenu.gif"; tooltip = "Select a receptor/ligand to draw"; } else { submenu = "receptors"; } DropDownButton lineButton = new DropDownButton( new ImageIcon(Resources.getResourceURL(icon))); lineButton.setToolTipText(tooltip); for(Action a : aa) { lineButton.addComponent(new JMenuItem(a)); } addToToolbar(lineButton, TB_GROUP_SHOW_IF_EDITMODE); lineButton.setEnabled(false); } } tb.addSeparator(); addToToolbar(actions.layoutActions); } public static final String TB_GROUP_SHOW_IF_EDITMODE = "edit"; public static final String TB_GROUP_SHOW_IF_VPATHWAY = "vpathway"; Map<String, List<Component>> toolbarGroups = new HashMap<String, List<Component>>(); public void addToToolbar(Component c, String group) { JToolBar tb = getToolBar(); if(tb == null) { Logger.log.warn("Trying to register toolbar action while no toolbar is available " + "(running in headless mode?)"); return; } tb.add(c); addToToolbarGroup(c, group); } public void addToToolbar(Component c) { addToToolbar(c, null); } public void addToToolbar(Action[] actions) { for(Action a : actions) { addToToolbar(a); } } public JButton addToToolbar(Action a, String group) { if(mayAddAction(a)) { JButton b = getToolBar().add(a); b.setFocusable(false); addToToolbarGroup(b, group); return b; } return null; } public JButton addToToolbar(Action a) { return addToToolbar(a, null); } private void addToToolbarGroup(Component c, String group) { if(group != null) { List<Component> gb = toolbarGroups.get(group); if(gb == null) { toolbarGroups.put(group, gb = new ArrayList<Component>()); } gb.add(c); } } public void addToMenu(Action a, JMenu parent) { if(mayAddAction(a)) { parent.add(a); } } public List<Component> getToolbarGroup(String group) { List<Component> tbg = toolbarGroups.get(group); if(tbg == null) tbg = new ArrayList<Component>(); return tbg; } public JToolBar getToolBar() { return 
toolBar; } public JScrollPane getScrollPane() { return pathwayScrollPane; } public JSplitPane getSplitPane() { return splitPane; } public BackpagePane getBackpagePane() { return backpagePane; } public void vPathwayEvent(VPathwayEvent e) { VPathway vp = (VPathway)e.getSource(); switch(e.getType()) { case VPathwayEvent.ELEMENT_DOUBLE_CLICKED: VPathwayElement pwe = e.getAffectedElement(); if(pwe instanceof Handle) { pwe = ((Handle)pwe).getParent(); } if(pwe instanceof Graphics && !(pwe instanceof SelectionBox)) { PathwayElement p = ((Graphics)pwe).getPathwayElement(); if(p != null) { PathwayElementDialog.getInstance(swingEngine, p, !vp.isEditMode(), null, this).setVisible(true); } } break; case VPathwayEvent.EDIT_MODE_ON: for(Component b : getToolbarGroup(TB_GROUP_SHOW_IF_EDITMODE)) { b.setEnabled(true); } break; case VPathwayEvent.EDIT_MODE_OFF: for(Component b : getToolbarGroup(TB_GROUP_SHOW_IF_EDITMODE)) { b.setEnabled(false); } break; } } public void applicationEvent(ApplicationEvent e) { switch(e.getType()) { case ApplicationEvent.VPATHWAY_CREATED: { VPathway vp = (VPathway)e.getSource(); vp.addVPathwayListener(this); vp.addVPathwayListener(pathwayElementMenuListener); for(Component b : getToolbarGroup(TB_GROUP_SHOW_IF_VPATHWAY)) { b.setEnabled(true); } } break; case ApplicationEvent.VPATHWAY_DISPOSED: { VPathway vp = (VPathway)e.getSource(); vp.removeVPathwayListener(this); vp.removeVPathwayListener(pathwayElementMenuListener); } break; } } public JMenuBar getMenuBar() { return menuBar; } public JTabbedPane getSideBarTabbedPane() { return sidebarTabbedPane; } public void dispose() { backpagePane.dispose(); } }
src/gui/org/pathvisio/gui/swing/MainPanel.java
// PathVisio, // a tool for data visualization and analysis using Biological Pathways // Copyright 2006-2009 BiGCaT Bioinformatics // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package org.pathvisio.gui.swing; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Component; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.swing.Action; import javax.swing.ActionMap; import javax.swing.ImageIcon; import javax.swing.InputMap; import javax.swing.JButton; import javax.swing.JComboBox; import javax.swing.JLabel; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import javax.swing.JTabbedPane; import javax.swing.JTable; import javax.swing.JToolBar; import javax.swing.KeyStroke; import javax.swing.event.HyperlinkEvent; import javax.swing.event.HyperlinkListener; import javax.swing.table.TableCellEditor; import javax.swing.table.TableCellRenderer; import org.pathvisio.ApplicationEvent; import org.pathvisio.Engine.ApplicationEventListener; import org.pathvisio.debug.Logger; import org.pathvisio.gui.BackpageTextProvider; import org.pathvisio.gui.BackpageTextProvider.BackpageAttributes; import org.pathvisio.gui.BackpageTextProvider.BackpageXrefs; 
import org.pathvisio.gui.swing.CommonActions.ZoomAction; import org.pathvisio.gui.swing.dialogs.PathwayElementDialog; import org.pathvisio.gui.swing.dnd.PathwayImportHandler; import org.pathvisio.gui.swing.propertypanel.PathwayTableModel; import org.pathvisio.model.PathwayElement; import org.pathvisio.util.Resources; import org.pathvisio.view.Graphics; import org.pathvisio.view.Handle; import org.pathvisio.view.SelectionBox; import org.pathvisio.view.VPathway; import org.pathvisio.view.VPathwayElement; import org.pathvisio.view.VPathwayEvent; import org.pathvisio.view.VPathwayListener; import com.mammothsoftware.frwk.ddb.DropDownButton; /** * this is the contents of the main window in the WikiPathways applet, * and contains the editor window, side panels, toolbar and menu. * * For the standalone application, the derived class MainPanelStandalone is used. */ public class MainPanel extends JPanel implements VPathwayListener, ApplicationEventListener { private JSplitPane splitPane; protected JToolBar toolBar; private JScrollPane pathwayScrollPane; private JScrollPane propertiesScrollPane; protected JTabbedPane sidebarTabbedPane; protected JMenuBar menuBar; private JTable propertyTable; protected BackpagePane backpagePane; protected BackpageTextProvider bpt; protected CommonActions actions; private final PathwayTableModel model; Set<Action> hideActions; protected SwingEngine swingEngine; private final PathwayElementMenuListener pathwayElementMenuListener; public PathwayElementMenuListener getPathwayElementMenuListener() { return pathwayElementMenuListener; } private boolean mayAddAction(Action a) { return hideActions == null || !hideActions.contains(a); } protected void addMenuActions(JMenuBar mb) { JMenu fileMenu = new JMenu("File"); addToMenu(actions.saveAction, fileMenu); addToMenu(actions.saveAsAction, fileMenu); fileMenu.addSeparator(); addToMenu(actions.importAction, fileMenu); addToMenu(actions.exportAction, fileMenu); fileMenu.addSeparator(); 
addToMenu(actions.exitAction, fileMenu); JMenu editMenu = new JMenu("Edit"); addToMenu(actions.undoAction, editMenu); addToMenu(actions.copyAction, editMenu); addToMenu(actions.pasteAction, editMenu); editMenu.addSeparator(); JMenu selectionMenu = new JMenu("Selection"); for(Action a : actions.layoutActions) addToMenu(a, selectionMenu); editMenu.add (selectionMenu); JMenu viewMenu = new JMenu("View"); JMenu zoomMenu = new JMenu("Zoom"); viewMenu.add(zoomMenu); for(Action a : actions.zoomActions) addToMenu(a, zoomMenu); JMenu helpMenu = new JMenu("Help"); mb.add(fileMenu); mb.add(editMenu); mb.add(viewMenu); mb.add(helpMenu); } /** * Constructor for this class. Creates the main panel of this application, containing * the main GUI elements (menubar, toolbar, sidepanel, drawing pane). Actions that should * not be added to the menubar and toolbar should be specified in the hideActions parameter * @param hideActions The {@link Action}s that should not be added to the toolbar and menubar */ public MainPanel(SwingEngine swingEngine, Set<Action> hideActions) { this.hideActions = hideActions; this.swingEngine = swingEngine; pathwayElementMenuListener = new PathwayElementMenuListener(swingEngine); model = new PathwayTableModel(swingEngine); } public void createAndShowGUI() { setLayout(new BorderLayout()); setTransferHandler(new PathwayImportHandler()); swingEngine.getEngine().addApplicationEventListener(this); actions = swingEngine.getActions(); toolBar = new JToolBar(); toolBar.setFloatable(false); // disable floatable toolbar, aka Abomination of interaction design. addToolBarActions(swingEngine, toolBar); add(toolBar, BorderLayout.PAGE_START); // menuBar will be added by container (JFrame or JApplet) pathwayScrollPane = new JScrollPane(); // set background color when no VPathway is loaded, override l&f because it is usually white. 
pathwayScrollPane.getViewport().setBackground(Color.LIGHT_GRAY); propertyTable = new JTable(model) { public TableCellRenderer getCellRenderer(int row, int column) { TableCellRenderer r = model.getCellRenderer(row, column); return r == null ? super.getCellRenderer(row, column) : r; } public TableCellEditor getCellEditor(int row, int column) { TableCellEditor e = model.getCellEditor(row, column); return e == null ? super.getCellEditor(row, column) : e; } }; //TODO: make this prettier, it's not good for the tablemodel to have //a reference to the table. Quick fix for preventing TableCellEditor //to remain open upon selecting a new PathwayElement model.setTable(propertyTable); propertiesScrollPane = new JScrollPane(propertyTable); bpt = new BackpageTextProvider (); bpt.addBackpageHook(new BackpageAttributes(swingEngine.getGdbManager().getCurrentGdb())); bpt.addBackpageHook(new BackpageXrefs(swingEngine.getGdbManager().getCurrentGdb())); backpagePane = new BackpagePane(bpt, swingEngine.getEngine()); backpagePane.addHyperlinkListener(new HyperlinkListener() { public void hyperlinkUpdate(HyperlinkEvent e) { if(e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) { try { MainPanel.this.swingEngine.openUrl(e.getURL()); } catch(UnsupportedOperationException ex) { Logger.log.error("Unable to open URL", ex); JOptionPane.showMessageDialog( MainPanel.this, "No browser launcher specified", "Unable to open link", JOptionPane.ERROR_MESSAGE ); } } } }); sidebarTabbedPane = new JTabbedPane(); sidebarTabbedPane.addTab( "Properties", propertiesScrollPane ); sidebarTabbedPane.addTab( "Backpage", new JScrollPane(backpagePane) ); splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, pathwayScrollPane, sidebarTabbedPane); splitPane.setResizeWeight(1); splitPane.setOneTouchExpandable(true); add(splitPane, BorderLayout.CENTER); Action[] keyStrokeActions = new Action[] { actions.copyAction, actions.pasteAction, }; InputMap im = getInputMap(); ActionMap am = getActionMap(); for(Action a : 
keyStrokeActions) { im.put((KeyStroke)a.getValue(Action.ACCELERATOR_KEY), a.getValue(Action.NAME)); am.put(a.getValue(Action.NAME), a); } menuBar = new JMenuBar(); addMenuActions(menuBar); } /** * Constructor for this class. Creates the main panel of this application, containing * the main GUI elements (menubar, toolbar, sidepanel, drawing pane). */ public MainPanel(SwingEngine swingEngine) { this(swingEngine, null); } /** * {@link ActionListener} for the Zoom combobox on the toolbar. The user can select one * of the predefined ZoomActions (50%, 100%, 200%, Zoom to fit, etc.), * or enter a number or percentage manually. */ protected class ZoomComboListener implements ActionListener { public void actionPerformed(ActionEvent e){ JComboBox combo = (JComboBox) e.getSource(); Object s = combo.getSelectedItem(); if (s instanceof Action) { ((Action) s).actionPerformed(e); } else if (s instanceof String) { String zs = (String) s; zs=zs.replace("%",""); try { double zf = Double.parseDouble(zs); ZoomAction za = new ZoomAction(swingEngine.getEngine(), zf); za.setEnabled(true); za.actionPerformed(e); } catch (Exception ex) { // Ignore bad input } } } } protected void addToolBarActions(final SwingEngine swingEngine, JToolBar tb) { tb.setLayout(new WrapLayout(1, 1)); addToToolbar(actions.importAction); addToToolbar(actions.exportAction); tb.addSeparator(); addToToolbar(actions.copyAction); addToToolbar(actions.pasteAction); tb.addSeparator(); addToToolbar(actions.undoAction); tb.addSeparator(); addToToolbar(new JLabel("Zoom:", JLabel.LEFT)); JComboBox combo = new JComboBox(actions.zoomActions); combo.setMaximumSize(combo.getPreferredSize()); combo.setEditable(true); combo.setSelectedIndex(5); // 100% combo.addActionListener(new ZoomComboListener()); addToToolbar(combo, TB_GROUP_SHOW_IF_VPATHWAY); tb.addSeparator(); String submenu = "line"; for(Action[] aa : actions.newElementActions) { if(aa.length == 1) { addToToolbar(aa[0]); } else { //This is the line/receptor sub-menu String 
icon = "newlinemenu.gif"; String tooltip = "Select a line to draw"; if(submenu.equals("receptors")) { //Next one is receptors icon = "newlineshapemenu.gif"; tooltip = "Select a receptor/ligand to draw"; } else { submenu = "receptors"; } DropDownButton lineButton = new DropDownButton( new ImageIcon(Resources.getResourceURL(icon))); lineButton.setToolTipText(tooltip); for(Action a : aa) { lineButton.addComponent(new JMenuItem(a)); } addToToolbar(lineButton, TB_GROUP_SHOW_IF_EDITMODE); lineButton.setEnabled(false); } } tb.addSeparator(); addToToolbar(actions.layoutActions); } public static final String TB_GROUP_SHOW_IF_EDITMODE = "edit"; public static final String TB_GROUP_SHOW_IF_VPATHWAY = "vpathway"; Map<String, List<Component>> toolbarGroups = new HashMap<String, List<Component>>(); public void addToToolbar(Component c, String group) { JToolBar tb = getToolBar(); if(tb == null) { Logger.log.warn("Trying to register toolbar action while no toolbar is available " + "(running in headless mode?)"); return; } tb.add(c); addToToolbarGroup(c, group); } public void addToToolbar(Component c) { addToToolbar(c, null); } public void addToToolbar(Action[] actions) { for(Action a : actions) { addToToolbar(a); } } public JButton addToToolbar(Action a, String group) { if(mayAddAction(a)) { JButton b = getToolBar().add(a); b.setFocusable(false); addToToolbarGroup(b, group); return b; } return null; } public JButton addToToolbar(Action a) { return addToToolbar(a, null); } private void addToToolbarGroup(Component c, String group) { if(group != null) { List<Component> gb = toolbarGroups.get(group); if(gb == null) { toolbarGroups.put(group, gb = new ArrayList<Component>()); } gb.add(c); } } public void addToMenu(Action a, JMenu parent) { if(mayAddAction(a)) { parent.add(a); } } public List<Component> getToolbarGroup(String group) { List<Component> tbg = toolbarGroups.get(group); if(tbg == null) tbg = new ArrayList<Component>(); return tbg; } public JToolBar getToolBar() { return 
toolBar; } public JScrollPane getScrollPane() { return pathwayScrollPane; } public JSplitPane getSplitPane() { return splitPane; } public BackpagePane getBackpagePane() { return backpagePane; } public void vPathwayEvent(VPathwayEvent e) { VPathway vp = (VPathway)e.getSource(); switch(e.getType()) { case VPathwayEvent.ELEMENT_DOUBLE_CLICKED: VPathwayElement pwe = e.getAffectedElement(); if(pwe instanceof Handle) { pwe = ((Handle)pwe).getParent(); } if(pwe instanceof Graphics && !(pwe instanceof SelectionBox)) { PathwayElement p = ((Graphics)pwe).getPathwayElement(); if(p != null) { PathwayElementDialog.getInstance(swingEngine, p, !vp.isEditMode(), null, this).setVisible(true); } } break; case VPathwayEvent.EDIT_MODE_ON: for(Component b : getToolbarGroup(TB_GROUP_SHOW_IF_EDITMODE)) { b.setEnabled(true); } break; case VPathwayEvent.EDIT_MODE_OFF: for(Component b : getToolbarGroup(TB_GROUP_SHOW_IF_EDITMODE)) { b.setEnabled(false); } break; } } public void applicationEvent(ApplicationEvent e) { switch(e.getType()) { case ApplicationEvent.VPATHWAY_CREATED: { VPathway vp = (VPathway)e.getSource(); vp.addVPathwayListener(this); vp.addVPathwayListener(pathwayElementMenuListener); for(Component b : getToolbarGroup(TB_GROUP_SHOW_IF_VPATHWAY)) { b.setEnabled(true); } } break; case ApplicationEvent.VPATHWAY_DISPOSED: { VPathway vp = (VPathway)e.getSource(); vp.removeVPathwayListener(this); vp.removeVPathwayListener(pathwayElementMenuListener); } break; } } public JMenuBar getMenuBar() { return menuBar; } public JTabbedPane getSideBarTabbedPane() { return sidebarTabbedPane; } public void dispose() { backpagePane.dispose(); } }
Accessor for model in MainPanel
src/gui/org/pathvisio/gui/swing/MainPanel.java
Accessor for model in MainPanel
Java
apache-2.0
e5be6b0bf9e45bf7f02705ef0db7bea4a2c8548f
0
ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE,ebi-uniprot/QuickGOBE
package uk.ac.ebi.quickgo.index.annotation; import uk.ac.ebi.quickgo.annotation.common.AnnotationDocument; import uk.ac.ebi.quickgo.annotation.common.AnnotationRepository; import uk.ac.ebi.quickgo.common.solr.TemporarySolrDataStore; import uk.ac.ebi.quickgo.index.annotation.coterms.CoTermTemporaryDataStore; import uk.ac.ebi.quickgo.index.common.JobTestRunnerConfig; import java.util.List; import java.util.stream.Collectors; import java.util.stream.StreamSupport; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.StepExecution; import org.springframework.batch.test.JobLauncherTestUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.SpringApplicationContextLoader; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.hasSize; import static org.springframework.test.util.MatcherAssertionErrors.assertThat; import static uk.ac.ebi.quickgo.index.annotation.AnnotationConfig.ANNOTATION_INDEXING_JOB_NAME; import static uk.ac.ebi.quickgo.index.annotation.AnnotationConfig.ANNOTATION_INDEXING_STEP_NAME; import static uk.ac.ebi.quickgo.index.annotation.coterms.CoTermsConfig.CO_TERM_ALL_SUMMARIZATION_STEP; import static uk.ac.ebi.quickgo.index.annotation.coterms.CoTermsConfig.CO_TERM_MANUAL_SUMMARIZATION_STEP; /** * Tests whether Spring Batch is correctly wired up to run the annotation indexing. 
* * Created 22/04/16 * @author Edd */ @ActiveProfiles(profiles = {"embeddedServer"}) @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration( classes = {AnnotationIndexingConfig.class, JobTestRunnerConfig.class, CoTermTemporaryDataStore.Config.class}, loader = SpringApplicationContextLoader.class) public class AnnotationIndexingBatchIT { @ClassRule public static final CoTermTemporaryDataStore coTermsDataStore = new CoTermTemporaryDataStore(); @ClassRule public static final TemporarySolrDataStore solrDataStore = new TemporarySolrDataStore(); @Autowired private JobLauncherTestUtils jobLauncherTestUtils; @Autowired private AnnotationRepository annotationRepository; @Before public void setUp() { annotationRepository.deleteAll(); } @Test public void successfulIndexingJob() throws Exception { JobExecution jobExecution = jobLauncherTestUtils.launchJob(); assertThat(jobExecution.getJobInstance().getJobName(), is(ANNOTATION_INDEXING_JOB_NAME)); List<StepExecution> jobsSingleStepAsList = jobExecution.getStepExecutions() .stream() .filter(step -> step.getStepName().equals(ANNOTATION_INDEXING_STEP_NAME)) .collect(Collectors.toList()); assertThat(jobsSingleStepAsList, hasSize(1)); StepExecution indexingStep = jobsSingleStepAsList.get(0); assertThat(indexingStep.getReadCount(), is(8)); assertThat(indexingStep.getReadSkipCount(), is(0)); assertThat(indexingStep.getProcessSkipCount(), is(2)); assertThat(indexingStep.getWriteCount(), is(6)); List<String> writtenAnnotationDocGeneProductIds = getGeneProductIdsFromAnnotationDocuments(annotationRepository.findAll()); assertThat(writtenAnnotationDocGeneProductIds, containsInAnyOrder( "IntAct:EBI-10043081", "IntAct:EBI-10043081", "IntAct:EBI-10043081", "IntAct:EBI-10205244", "IntAct:EBI-8801830", "IntAct:EBI-10043089" )); //Manual List<StepExecution> summarizeCoTermManualSteps = jobExecution.getStepExecutions() .stream() .filter(step -> step.getStepName().equals(CO_TERM_MANUAL_SUMMARIZATION_STEP)) .collect(Collectors.toList()); 
assertThat(summarizeCoTermManualSteps, hasSize(1)); StepExecution coTermsManualStep = summarizeCoTermManualSteps.get(0); assertThat(coTermsManualStep.getReadCount(), is(4)); assertThat(coTermsManualStep.getReadSkipCount(), is(0)); assertThat(coTermsManualStep.getProcessSkipCount(), is(0)); assertThat(coTermsManualStep.getWriteCount(), is(4)); List<StepExecution> summarizeCoTermAllSteps = jobExecution.getStepExecutions() .stream() .filter(step -> step.getStepName().equals(CO_TERM_ALL_SUMMARIZATION_STEP)) .collect(Collectors.toList()); assertThat(summarizeCoTermAllSteps, hasSize(1)); StepExecution coTermsAllStep = summarizeCoTermAllSteps.get(0); assertThat(coTermsAllStep.getReadCount(), is(5)); assertThat(coTermsAllStep.getReadSkipCount(), is(0)); assertThat(coTermsAllStep.getProcessSkipCount(), is(0)); assertThat(coTermsAllStep.getWriteCount(), is(5)); assertThat(coTermsAllStep.getExecutionContext().get("FlatFileItemWriter.written"), is(7L)); //Has finished BatchStatus status = jobExecution.getStatus(); assertThat(status, is(BatchStatus.COMPLETED)); } private List<String> getGeneProductIdsFromAnnotationDocuments(Iterable<AnnotationDocument> repoDocsWritten) { return StreamSupport.stream(repoDocsWritten.spliterator(), false).map(i -> i.geneProductId).collect(Collectors .toList()); } }
indexing/src/test/java/uk/ac/ebi/quickgo/index/annotation/AnnotationIndexingBatchIT.java
package uk.ac.ebi.quickgo.index.annotation; import uk.ac.ebi.quickgo.annotation.common.AnnotationDocument; import uk.ac.ebi.quickgo.annotation.common.AnnotationRepository; import uk.ac.ebi.quickgo.common.solr.TemporarySolrDataStore; import uk.ac.ebi.quickgo.index.annotation.coterms.CoTermTemporaryDataStore; import uk.ac.ebi.quickgo.index.common.JobTestRunnerConfig; import java.util.List; import java.util.stream.Collectors; import java.util.stream.StreamSupport; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.StepExecution; import org.springframework.batch.test.JobLauncherTestUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.SpringApplicationContextLoader; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.hasSize; import static org.springframework.test.util.MatcherAssertionErrors.assertThat; import static uk.ac.ebi.quickgo.index.annotation.AnnotationConfig.ANNOTATION_INDEXING_JOB_NAME; import static uk.ac.ebi.quickgo.index.annotation.AnnotationConfig.ANNOTATION_INDEXING_STEP_NAME; import static uk.ac.ebi.quickgo.index.annotation.coterms.CoTermsConfig.CO_TERM_ALL_SUMMARIZATION_STEP; import static uk.ac.ebi.quickgo.index.annotation.coterms.CoTermsConfig.CO_TERM_MANUAL_SUMMARIZATION_STEP; /** * Tests whether Spring Batch is correctly wired up to run the annotation indexing. 
* * Created 22/04/16 * @author Edd */ @ActiveProfiles(profiles = {"embeddedServer"}) @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration( classes = {AnnotationConfig.class, JobTestRunnerConfig.class, CoTermTemporaryDataStore.Config.class}, loader = SpringApplicationContextLoader.class) public class AnnotationIndexingBatchIT { @ClassRule public static final CoTermTemporaryDataStore coTermsDataStore = new CoTermTemporaryDataStore(); @ClassRule public static final TemporarySolrDataStore solrDataStore = new TemporarySolrDataStore(); @Autowired private JobLauncherTestUtils jobLauncherTestUtils; @Autowired private AnnotationRepository annotationRepository; @Before public void setUp() { annotationRepository.deleteAll(); } @Test public void successfulIndexingJob() throws Exception { JobExecution jobExecution = jobLauncherTestUtils.launchJob(); assertThat(jobExecution.getJobInstance().getJobName(), is(ANNOTATION_INDEXING_JOB_NAME)); List<StepExecution> jobsSingleStepAsList = jobExecution.getStepExecutions() .stream() .filter(step -> step.getStepName().equals(ANNOTATION_INDEXING_STEP_NAME)) .collect(Collectors.toList()); assertThat(jobsSingleStepAsList, hasSize(1)); StepExecution indexingStep = jobsSingleStepAsList.get(0); assertThat(indexingStep.getReadCount(), is(8)); assertThat(indexingStep.getReadSkipCount(), is(0)); assertThat(indexingStep.getProcessSkipCount(), is(2)); assertThat(indexingStep.getWriteCount(), is(6)); List<String> writtenAnnotationDocGeneProductIds = getGeneProductIdsFromAnnotationDocuments(annotationRepository.findAll()); assertThat(writtenAnnotationDocGeneProductIds, containsInAnyOrder( "IntAct:EBI-10043081", "IntAct:EBI-10043081", "IntAct:EBI-10043081", "IntAct:EBI-10205244", "IntAct:EBI-8801830", "IntAct:EBI-10043089" )); //Manual List<StepExecution> summarizeCoTermManualSteps = jobExecution.getStepExecutions() .stream() .filter(step -> step.getStepName().equals(CO_TERM_MANUAL_SUMMARIZATION_STEP)) .collect(Collectors.toList()); 
assertThat(summarizeCoTermManualSteps, hasSize(1)); StepExecution coTermsManualStep = summarizeCoTermManualSteps.get(0); assertThat(coTermsManualStep.getReadCount(), is(4)); assertThat(coTermsManualStep.getReadSkipCount(), is(0)); assertThat(coTermsManualStep.getProcessSkipCount(), is(0)); assertThat(coTermsManualStep.getWriteCount(), is(4)); List<StepExecution> summarizeCoTermAllSteps = jobExecution.getStepExecutions() .stream() .filter(step -> step.getStepName().equals(CO_TERM_ALL_SUMMARIZATION_STEP)) .collect(Collectors.toList()); assertThat(summarizeCoTermAllSteps, hasSize(1)); StepExecution coTermsAllStep = summarizeCoTermAllSteps.get(0); assertThat(coTermsAllStep.getReadCount(), is(5)); assertThat(coTermsAllStep.getReadSkipCount(), is(0)); assertThat(coTermsAllStep.getProcessSkipCount(), is(0)); assertThat(coTermsAllStep.getWriteCount(), is(5)); assertThat(coTermsAllStep.getExecutionContext().get("FlatFileItemWriter.written"), is(7L)); //Has finished BatchStatus status = jobExecution.getStatus(); assertThat(status, is(BatchStatus.COMPLETED)); } private List<String> getGeneProductIdsFromAnnotationDocuments(Iterable<AnnotationDocument> repoDocsWritten) { return StreamSupport.stream(repoDocsWritten.spliterator(), false).map(i -> i.geneProductId).collect(Collectors .toList()); } }
Test changes form using AnnotationConfig to AnnotationIndexingConfig.
indexing/src/test/java/uk/ac/ebi/quickgo/index/annotation/AnnotationIndexingBatchIT.java
Test changes form using AnnotationConfig to AnnotationIndexingConfig.
Java
apache-2.0
2d3a1a787db86286aac7a3cacf9525716a1ec71a
0
pruivo/JGroups,slaskawi/JGroups,pferraro/JGroups,rhusar/JGroups,pferraro/JGroups,belaban/JGroups,belaban/JGroups,pruivo/JGroups,TarantulaTechnology/JGroups,slaskawi/JGroups,belaban/JGroups,tristantarrant/JGroups,danberindei/JGroups,ibrahimshbat/JGroups,deepnarsay/JGroups,deepnarsay/JGroups,vjuranek/JGroups,ligzy/JGroups,Sanne/JGroups,slaskawi/JGroups,tristantarrant/JGroups,ibrahimshbat/JGroups,kedzie/JGroups,danberindei/JGroups,ibrahimshbat/JGroups,dimbleby/JGroups,ligzy/JGroups,TarantulaTechnology/JGroups,rvansa/JGroups,vjuranek/JGroups,ibrahimshbat/JGroups,dimbleby/JGroups,rpelisse/JGroups,rvansa/JGroups,Sanne/JGroups,vjuranek/JGroups,kedzie/JGroups,kedzie/JGroups,pruivo/JGroups,rpelisse/JGroups,pferraro/JGroups,TarantulaTechnology/JGroups,dimbleby/JGroups,rhusar/JGroups,danberindei/JGroups,rpelisse/JGroups,rhusar/JGroups,Sanne/JGroups,ligzy/JGroups,deepnarsay/JGroups
package org.jgroups.stack; import org.jgroups.*; import org.jgroups.conf.ClassConfigurator; import org.jgroups.util.TimeScheduler; import org.jgroups.util.Util; import java.util.*; import java.util.concurrent.ThreadFactory; /** * A ProtocolStack manages a number of protocols layered above each other. It creates all * protocol classes, initializes them and, when ready, starts all of them, beginning with the * bottom most protocol. It also dispatches messages received from the stack to registered * objects (e.g. channel, GMP) and sends messages sent by those objects down the stack.<p> * The ProtocolStack makes use of the Configurator to setup and initialize stacks, and to * destroy them again when not needed anymore * @author Bela Ban * @version $Id: ProtocolStack.java,v 1.46 2007/04/02 10:59:05 belaban Exp $ */ public class ProtocolStack extends Protocol implements Transport { private Protocol top_prot=null; private Protocol bottom_prot=null; private final Configurator conf=new Configurator(); private String setup_string; private JChannel channel=null; private boolean stopped=true; public final TimeScheduler timer; protected ThreadGroup timer_thread_group=new ThreadGroup(Util.getGlobalThreadGroup(), "Timers"); public static final int ABOVE=1; // used by insertProtocol() public static final int BELOW=2; // used by insertProtocol() private static final String TIMER_NAME="Timer"; public ProtocolStack(JChannel channel, String setup_string) throws ChannelException { this.setup_string=setup_string; this.channel=channel; ClassConfigurator.getInstance(true); // will create the singleton timer=createTimer(); } /** Only used by Simulator; don't use */ public ProtocolStack() { timer=createTimer(); } public Channel getChannel() { return channel; } public int getTimerThreads() { return timer.getCorePoolSize(); } /** Returns all protocols in a list, from top to bottom. 
<em>These are not copies of protocols, so modifications will affect the actual instances !</em> */ public Vector<Protocol> getProtocols() { Protocol p; Vector<Protocol> v=new Vector<Protocol>(); p=top_prot; while(p != null) { v.addElement(p); p=p.getDownProtocol(); } return v; } /** * * @return Map<String,Map<key,val>> */ public Map<String,Object> dumpStats() { Protocol p; Map<String,Object> retval=new HashMap<String,Object>(), tmp; String prot_name; p=top_prot; while(p != null) { prot_name=p.getName(); tmp=p.dumpStats(); if(prot_name != null && tmp != null) retval.put(prot_name, tmp); p=p.getDownProtocol(); } return retval; } public String dumpTimerQueue() { return timer.dumpTaskQueue(); } /** * Prints the names of the protocols, from the bottom to top. If include_properties is true, * the properties for each protocol will also be printed. */ public String printProtocolSpec(boolean include_properties) { StringBuilder sb=new StringBuilder(); Protocol prot=top_prot; Properties tmpProps; String name; Map.Entry entry; while(prot != null) { name=prot.getName(); if(name != null) { if("ProtocolStack".equals(name)) break; sb.append(name); if(include_properties) { tmpProps=prot.getProperties(); if(tmpProps != null) { sb.append('\n'); for(Iterator it=tmpProps.entrySet().iterator(); it.hasNext();) { entry=(Map.Entry)it.next(); sb.append(entry).append("\n"); } } } sb.append('\n'); prot=prot.getDownProtocol(); } } return sb.toString(); } public String printProtocolSpecAsXML() { StringBuilder sb=new StringBuilder(); Protocol prot=bottom_prot; Properties tmpProps; String name; Map.Entry entry; int len, max_len=30; sb.append("<config>\n"); while(prot != null) { name=prot.getName(); if(name != null) { if("ProtocolStack".equals(name)) break; sb.append(" <").append(name).append(" "); tmpProps=prot.getProperties(); if(tmpProps != null) { len=name.length(); String s; for(Iterator it=tmpProps.entrySet().iterator(); it.hasNext();) { entry=(Map.Entry)it.next(); s=entry.getKey() + "=\"" + 
entry.getValue() + "\" "; if(len + s.length() > max_len) { sb.append("\n "); len=8; } sb.append(s); len+=s.length(); } } sb.append("/>\n"); prot=prot.getUpProtocol(); } } sb.append("</config>"); return sb.toString(); } public void setup() throws Exception { if(top_prot == null) { top_prot=conf.setupProtocolStack(setup_string, this); top_prot.setUpProtocol(this); bottom_prot=conf.getBottommostProtocol(top_prot); conf.initProtocolStack(bottom_prot); // calls init() on each protocol, from bottom to top } } /** * Creates a new protocol given the protocol specification. * @param prot_spec The specification of the protocol. Same convention as for specifying a protocol stack. * An exception will be thrown if the class cannot be created. Example: * <pre>"VERIFY_SUSPECT(timeout=1500)"</pre> Note that no colons (:) have to be * specified * @return Protocol The newly created protocol * @exception Exception Will be thrown when the new protocol cannot be created */ public Protocol createProtocol(String prot_spec) throws Exception { return conf.createProtocol(prot_spec, this); } /** * Inserts an already created (and initialized) protocol into the protocol list. Sets the links * to the protocols above and below correctly and adjusts the linked list of protocols accordingly. * Note that this method may change the value of top_prot or bottom_prot. * @param prot The protocol to be inserted. Before insertion, a sanity check will ensure that none * of the existing protocols have the same name as the new protocol. * @param position Where to place the protocol with respect to the neighbor_prot (ABOVE, BELOW) * @param neighbor_prot The name of the neighbor protocol. An exception will be thrown if this name * is not found * @exception Exception Will be thrown when the new protocol cannot be created, or inserted. 
*/ public void insertProtocol(Protocol prot, int position, String neighbor_prot) throws Exception { conf.insertProtocol(prot, position, neighbor_prot, this); } /** * Removes a protocol from the stack. Stops the protocol and readjusts the linked lists of * protocols. * @param prot_name The name of the protocol. Since all protocol names in a stack have to be unique * (otherwise the stack won't be created), the name refers to just 1 protocol. * @exception Exception Thrown if the protocol cannot be stopped correctly. */ public Protocol removeProtocol(String prot_name) throws Exception { return conf.removeProtocol(top_prot, prot_name); } /** Returns a given protocol or null if not found */ public Protocol findProtocol(String name) { Protocol tmp=top_prot; String prot_name; while(tmp != null) { prot_name=tmp.getName(); if(prot_name != null && prot_name.equals(name)) return tmp; tmp=tmp.getDownProtocol(); } return null; } public void destroy() { if(top_prot != null) { conf.destroyProtocolStack(top_prot); // destroys msg queues and threads top_prot=null; } try { timer.stop(); } catch(Exception ex) { } } /** * Start all layers. The {@link Protocol#start()} method is called in each protocol, * <em>from top to bottom</em>. * Each layer can perform some initialization, e.g. 
create a multicast socket */ public void startStack() throws Exception { if(stopped == false) return; timer.start(); conf.startProtocolStack(top_prot); stopped=false; } /** * Iterates through all the protocols <em>from top to bottom</em> and does the following: * <ol> * <li>Waits until all messages in the down queue have been flushed (ie., size is 0) * <li>Calls stop() on the protocol * </ol> */ public void stopStack() { if(stopped) return; conf.stopProtocolStack(top_prot); stopped=true; } /** * Not needed anymore, just left in here for backwards compatibility with JBoss AS * @deprecated */ public void flushEvents() { } /*--------------------------- Transport interface ------------------------------*/ public void send(Message msg) throws Exception { down(new Event(Event.MSG, msg)); } public Object receive(long timeout) throws Exception { throw new Exception("ProtocolStack.receive(): not implemented !"); } /*------------------------- End of Transport interface ---------------------------*/ /*--------------------------- Protocol functionality ------------------------------*/ public String getName() {return "ProtocolStack";} public Object up(Event evt) { return channel.up(evt); } public Object down(Event evt) { switch(evt.getType()) { case Event.CONNECT: case Event.DISCONNECT: Object retval=top_prot.down(evt); renameTimerThreads(TIMER_NAME); return retval; } if(top_prot != null) return top_prot.down(evt); return null; } /*----------------------- End of Protocol functionality ---------------------------*/ private TimeScheduler createTimer() { ThreadFactory factory=new ThreadFactory() { public Thread newThread(Runnable command) { Thread thread=new Thread(timer_thread_group, command, TIMER_NAME); renameThread(TIMER_NAME, thread); return thread; } }; return new TimeScheduler(factory); } private void renameTimerThreads(String base_name) { if(timer_thread_group == null) return; String cluster_name=getClusterName(); Address local_addr=getLocalAddress(); int 
num_threads=timer_thread_group.activeCount(); Thread[] timers=new Thread[num_threads]; num_threads=timer_thread_group.enumerate(timers); for(int i=0; i < num_threads; i++) { Thread thread=timers[i]; renameThread(base_name, thread, cluster_name, local_addr); } } private String renameThread(String base_name, Thread runner) { return renameThread(base_name, runner, getClusterName(), getLocalAddress()); } private String renameThread(String base_name, Thread runner, String cluster_name, Address local_addr) { String oldName = null; if(runner!=null){ oldName=runner.getName(); StringBuilder threadName=new StringBuilder(); threadName.append(base_name); if(threadName.length() > 0) threadName.append(','); if(cluster_name == null) cluster_name=getClusterName(); threadName.append(cluster_name); if(threadName.length() > 0) threadName.append(','); if(local_addr == null) local_addr=getLocalAddress(); threadName.append(local_addr); runner.setName(threadName.toString()); } return oldName; } private Address getLocalAddress() { return channel != null? channel.getLocalAddress() : null; } private String getClusterName() { return channel != null? channel.getClusterName() : "n/a"; } }
src/org/jgroups/stack/ProtocolStack.java
package org.jgroups.stack; import org.jgroups.*; import org.jgroups.conf.ClassConfigurator; import org.jgroups.util.TimeScheduler; import org.jgroups.util.Util; import java.util.*; import java.util.concurrent.ThreadFactory; /** * A ProtocolStack manages a number of protocols layered above each other. It creates all * protocol classes, initializes them and, when ready, starts all of them, beginning with the * bottom most protocol. It also dispatches messages received from the stack to registered * objects (e.g. channel, GMP) and sends messages sent by those objects down the stack.<p> * The ProtocolStack makes use of the Configurator to setup and initialize stacks, and to * destroy them again when not needed anymore * @author Bela Ban * @version $Id: ProtocolStack.java,v 1.45 2007/04/02 10:49:39 belaban Exp $ */ public class ProtocolStack extends Protocol implements Transport { private Protocol top_prot=null; private Protocol bottom_prot=null; private final Configurator conf=new Configurator(); private String setup_string; private JChannel channel=null; private boolean stopped=true; public final TimeScheduler timer; protected ThreadGroup timer_thread_group=new ThreadGroup(Util.getGlobalThreadGroup(), "Timers"); public static final int ABOVE=1; // used by insertProtocol() public static final int BELOW=2; // used by insertProtocol() private static final String TIMER_NAME="Timer"; public ProtocolStack(JChannel channel, String setup_string) throws ChannelException { this.setup_string=setup_string; this.channel=channel; ClassConfigurator.getInstance(true); // will create the singleton timer=createTimer(); } /** Only used by Simulator; don't use */ public ProtocolStack() { timer=createTimer(); } public Channel getChannel() { return channel; } public int getTimerThreads() { return timer.getCorePoolSize(); } /** Returns all protocols in a list, from top to bottom. 
<em>These are not copies of protocols, so modifications will affect the actual instances !</em> */ public Vector getProtocols() { Protocol p; Vector v=new Vector(); p=top_prot; while(p != null) { v.addElement(p); p=p.getDownProtocol(); } return v; } /** * * @return Map<String,Map<key,val>> */ public Map dumpStats() { Protocol p; Map retval=new HashMap(), tmp; String prot_name; p=top_prot; while(p != null) { prot_name=p.getName(); tmp=p.dumpStats(); if(prot_name != null && tmp != null) retval.put(prot_name, tmp); p=p.getDownProtocol(); } return retval; } public String dumpTimerQueue() { return timer.dumpTaskQueue(); } /** * Prints the names of the protocols, from the bottom to top. If include_properties is true, * the properties for each protocol will also be printed. */ public String printProtocolSpec(boolean include_properties) { StringBuilder sb=new StringBuilder(); Protocol prot=top_prot; Properties tmpProps; String name; Map.Entry entry; while(prot != null) { name=prot.getName(); if(name != null) { if("ProtocolStack".equals(name)) break; sb.append(name); if(include_properties) { tmpProps=prot.getProperties(); if(tmpProps != null) { sb.append('\n'); for(Iterator it=tmpProps.entrySet().iterator(); it.hasNext();) { entry=(Map.Entry)it.next(); sb.append(entry).append("\n"); } } } sb.append('\n'); prot=prot.getDownProtocol(); } } return sb.toString(); } public String printProtocolSpecAsXML() { StringBuilder sb=new StringBuilder(); Protocol prot=bottom_prot; Properties tmpProps; String name; Map.Entry entry; int len, max_len=30; sb.append("<config>\n"); while(prot != null) { name=prot.getName(); if(name != null) { if("ProtocolStack".equals(name)) break; sb.append(" <").append(name).append(" "); tmpProps=prot.getProperties(); if(tmpProps != null) { len=name.length(); String s; for(Iterator it=tmpProps.entrySet().iterator(); it.hasNext();) { entry=(Map.Entry)it.next(); s=entry.getKey() + "=\"" + entry.getValue() + "\" "; if(len + s.length() > max_len) { sb.append("\n 
"); len=8; } sb.append(s); len+=s.length(); } } sb.append("/>\n"); prot=prot.getUpProtocol(); } } sb.append("</config>"); return sb.toString(); } public void setup() throws Exception { if(top_prot == null) { top_prot=conf.setupProtocolStack(setup_string, this); top_prot.setUpProtocol(this); bottom_prot=conf.getBottommostProtocol(top_prot); conf.initProtocolStack(bottom_prot); // calls init() on each protocol, from bottom to top } } /** * Creates a new protocol given the protocol specification. * @param prot_spec The specification of the protocol. Same convention as for specifying a protocol stack. * An exception will be thrown if the class cannot be created. Example: * <pre>"VERIFY_SUSPECT(timeout=1500)"</pre> Note that no colons (:) have to be * specified * @return Protocol The newly created protocol * @exception Exception Will be thrown when the new protocol cannot be created */ public Protocol createProtocol(String prot_spec) throws Exception { return conf.createProtocol(prot_spec, this); } /** * Inserts an already created (and initialized) protocol into the protocol list. Sets the links * to the protocols above and below correctly and adjusts the linked list of protocols accordingly. * Note that this method may change the value of top_prot or bottom_prot. * @param prot The protocol to be inserted. Before insertion, a sanity check will ensure that none * of the existing protocols have the same name as the new protocol. * @param position Where to place the protocol with respect to the neighbor_prot (ABOVE, BELOW) * @param neighbor_prot The name of the neighbor protocol. An exception will be thrown if this name * is not found * @exception Exception Will be thrown when the new protocol cannot be created, or inserted. */ public void insertProtocol(Protocol prot, int position, String neighbor_prot) throws Exception { conf.insertProtocol(prot, position, neighbor_prot, this); } /** * Removes a protocol from the stack. 
Stops the protocol and readjusts the linked lists of * protocols. * @param prot_name The name of the protocol. Since all protocol names in a stack have to be unique * (otherwise the stack won't be created), the name refers to just 1 protocol. * @exception Exception Thrown if the protocol cannot be stopped correctly. */ public Protocol removeProtocol(String prot_name) throws Exception { return conf.removeProtocol(top_prot, prot_name); } /** Returns a given protocol or null if not found */ public Protocol findProtocol(String name) { Protocol tmp=top_prot; String prot_name; while(tmp != null) { prot_name=tmp.getName(); if(prot_name != null && prot_name.equals(name)) return tmp; tmp=tmp.getDownProtocol(); } return null; } public void destroy() { if(top_prot != null) { conf.destroyProtocolStack(top_prot); // destroys msg queues and threads top_prot=null; } try { timer.stop(); } catch(Exception ex) { } } /** * Start all layers. The {@link Protocol#start()} method is called in each protocol, * <em>from top to bottom</em>. * Each layer can perform some initialization, e.g. 
create a multicast socket */ public void startStack() throws Exception { if(stopped == false) return; timer.start(); conf.startProtocolStack(top_prot); stopped=false; } /** * Iterates through all the protocols <em>from top to bottom</em> and does the following: * <ol> * <li>Waits until all messages in the down queue have been flushed (ie., size is 0) * <li>Calls stop() on the protocol * </ol> */ public void stopStack() { if(stopped) return; conf.stopProtocolStack(top_prot); stopped=true; } /** * Not needed anymore, just left in here for backwards compatibility with JBoss AS * @deprecated */ public void flushEvents() { } /*--------------------------- Transport interface ------------------------------*/ public void send(Message msg) throws Exception { down(new Event(Event.MSG, msg)); } public Object receive(long timeout) throws Exception { throw new Exception("ProtocolStack.receive(): not implemented !"); } /*------------------------- End of Transport interface ---------------------------*/ /*--------------------------- Protocol functionality ------------------------------*/ public String getName() {return "ProtocolStack";} public Object up(Event evt) { return channel.up(evt); } public Object down(Event evt) { switch(evt.getType()) { case Event.CONNECT: case Event.DISCONNECT: Object retval=top_prot.down(evt); renameTimerThreads(TIMER_NAME); return retval; } if(top_prot != null) return top_prot.down(evt); return null; } /*----------------------- End of Protocol functionality ---------------------------*/ private TimeScheduler createTimer() { ThreadFactory factory=new ThreadFactory() { public Thread newThread(Runnable command) { Thread thread=new Thread(timer_thread_group, command, TIMER_NAME); renameThread(TIMER_NAME, thread); return thread; } }; return new TimeScheduler(factory); } private void renameTimerThreads(String base_name) { if(timer_thread_group == null) return; String cluster_name=getClusterName(); Address local_addr=getLocalAddress(); int 
num_threads=timer_thread_group.activeCount(); Thread[] timers=new Thread[num_threads]; num_threads=timer_thread_group.enumerate(timers); for(int i=0; i < num_threads; i++) { Thread thread=timers[i]; renameThread(base_name, thread, cluster_name, local_addr); } } private String renameThread(String base_name, Thread runner) { return renameThread(base_name, runner, getClusterName(), getLocalAddress()); } private String renameThread(String base_name, Thread runner, String cluster_name, Address local_addr) { String oldName = null; if(runner!=null){ oldName=runner.getName(); StringBuilder threadName=new StringBuilder(); threadName.append(base_name); if(threadName.length() > 0) threadName.append(','); if(cluster_name == null) cluster_name=getClusterName(); threadName.append(cluster_name); if(threadName.length() > 0) threadName.append(','); if(local_addr == null) local_addr=getLocalAddress(); threadName.append(local_addr); runner.setName(threadName.toString()); } return oldName; } private Address getLocalAddress() { return channel != null? channel.getLocalAddress() : null; } private String getClusterName() { return channel != null? channel.getClusterName() : "n/a"; } }
typed collections
src/org/jgroups/stack/ProtocolStack.java
typed collections
Java
apache-2.0
ec9a8b2c7c6b34ddbea51b461ea6664833556f2a
0
111t8e/h2o-2,h2oai/h2o,vbelakov/h2o,elkingtonmcb/h2o-2,elkingtonmcb/h2o-2,h2oai/h2o-2,111t8e/h2o-2,111t8e/h2o-2,vbelakov/h2o,eg-zhang/h2o-2,rowhit/h2o-2,eg-zhang/h2o-2,rowhit/h2o-2,elkingtonmcb/h2o-2,100star/h2o,calvingit21/h2o-2,h2oai/h2o-2,rowhit/h2o-2,h2oai/h2o,h2oai/h2o,111t8e/h2o-2,eg-zhang/h2o-2,calvingit21/h2o-2,100star/h2o,rowhit/h2o-2,eg-zhang/h2o-2,100star/h2o,calvingit21/h2o-2,111t8e/h2o-2,100star/h2o,elkingtonmcb/h2o-2,h2oai/h2o,calvingit21/h2o-2,111t8e/h2o-2,100star/h2o,eg-zhang/h2o-2,vbelakov/h2o,elkingtonmcb/h2o-2,vbelakov/h2o,eg-zhang/h2o-2,rowhit/h2o-2,h2oai/h2o,eg-zhang/h2o-2,rowhit/h2o-2,h2oai/h2o-2,calvingit21/h2o-2,vbelakov/h2o,vbelakov/h2o,vbelakov/h2o,h2oai/h2o,calvingit21/h2o-2,h2oai/h2o,vbelakov/h2o,111t8e/h2o-2,h2oai/h2o-2,100star/h2o,vbelakov/h2o,h2oai/h2o-2,eg-zhang/h2o-2,calvingit21/h2o-2,eg-zhang/h2o-2,calvingit21/h2o-2,h2oai/h2o,h2oai/h2o-2,elkingtonmcb/h2o-2,h2oai/h2o-2,elkingtonmcb/h2o-2,calvingit21/h2o-2,elkingtonmcb/h2o-2,rowhit/h2o-2,100star/h2o,h2oai/h2o-2,elkingtonmcb/h2o-2,h2oai/h2o-2,h2oai/h2o-2,100star/h2o,h2oai/h2o,eg-zhang/h2o-2,h2oai/h2o,calvingit21/h2o-2,111t8e/h2o-2,vbelakov/h2o,rowhit/h2o-2,rowhit/h2o-2,elkingtonmcb/h2o-2,100star/h2o,111t8e/h2o-2,111t8e/h2o-2,rowhit/h2o-2
package hex; import hex.deeplearning.*; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import water.*; import water.api.QuantilesPage; import water.exec.Env; import water.exec.Exec2; import water.fvec.*; import water.util.FrameUtils; import water.util.Log; import water.util.MRUtils; import java.util.HashSet; public class DeepLearningAutoEncoderTest extends TestUtil { /* Visualize outliers with the following R code (from smalldata/anomaly dir): train <- scan("ecg_discord_train.csv", sep=",") test <- scan("ecg_discord_test.csv", sep=",") plot.ts(train) plot.ts(test) */ static final String PATH = "smalldata/anomaly/ecg_discord_train.csv"; //first 20 points static final String PATH2 = "smalldata/anomaly/ecg_discord_test.csv"; //first 22 points @BeforeClass public static void stall() { stall_till_cloudsize(JUnitRunnerDebug.NODES); } @Test public void run() { long seed = 0xDECAF; Key file_train = NFSFileVec.make(find_test_file(PATH)); Frame train = ParseDataset2.parse(Key.make(), new Key[]{file_train}); Key file_test = NFSFileVec.make(find_test_file(PATH2)); Frame test = ParseDataset2.parse(Key.make(), new Key[]{file_test}); DeepLearning p = new DeepLearning(); p.source = train; p.autoencoder = true; p.response = train.vecs()[0]; //ignored anyway p.classification = false; p.seed = seed; p.hidden = new int[]{20}; p.adaptive_rate = true; p.l1 = 1e-4; // p.l2 = 1e-4; // p.rate = 1e-5; p.activation = DeepLearning.Activation.Tanh; p.loss = DeepLearning.Loss.MeanSquare; // p.initial_weight_distribution = DeepLearning.InitialWeightDistribution.Normal; // p.initial_weight_scale = 1e-3; p.epochs = 100; // p.shuffle_training_data = true; p.force_load_balance = true; p.invoke(); DeepLearningModel mymodel = UKV.get(p.dest()); // Verification of results StringBuilder sb = new StringBuilder(); sb.append("Verifying results."); // Training data // Reconstruct data using the same helper functions and verify that self-reported MSE agrees double quantile = 
0.95; final Frame l2_frame_train = mymodel.scoreAutoEncoder(train); final Vec l2_train = l2_frame_train.anyVec(); sb.append("Mean reconstruction error: " + l2_train.mean() + "\n"); Assert.assertEquals(mymodel.mse(), l2_train.mean(), 1e-7); // manually compute L2 Frame reconstr = mymodel.score(train); //this creates real values in original space double mean_l2 = 0; for (int r=0; r<reconstr.numRows(); ++r) { double my_l2 = 0; for (int c = 0; c < reconstr.numCols(); ++c) { my_l2 += Math.pow((reconstr.vec(c).at(r) - train.vec(c).at(r)) * mymodel.model_info().data_info()._normMul[c], 2); //undo normalization here } my_l2 /= reconstr.numCols(); mean_l2 += my_l2; } mean_l2 /= reconstr.numRows(); reconstr.delete(); sb.append("Mean reconstruction error (train): " + l2_train.mean() + "\n"); Assert.assertEquals(mymodel.mse(), mean_l2, 1e-7); // print stats and potential outliers sb.append("The following training points are reconstructed with an error above the " + quantile*100 + "-th percentile - check for \"goodness\" of training data.\n"); double thresh_train = mymodel.calcOutlierThreshold(l2_train, quantile); for( long i=0; i<l2_train.length(); i++ ) { if (l2_train.at(i) > thresh_train) { sb.append(String.format("row %d : l2_train error = %5f\n", i, l2_train.at(i))); } } // Test data // Reconstruct data using the same helper functions and verify that self-reported MSE agrees final Frame l2_frame_test = mymodel.scoreAutoEncoder(test); final Vec l2_test = l2_frame_test.anyVec(); double mult = 10; double thresh_test = mult*thresh_train; sb.append("\nFinding outliers.\n"); sb.append("Mean reconstruction error (test): " + l2_test.mean() + "\n"); // print stats and potential outliers sb.append("The following test points are reconstructed with an error greater than " + mult + " times the mean reconstruction error of the training data:\n"); HashSet<Long> outliers = new HashSet<Long>(); for( long i=0; i<l2_test.length(); i++ ) { if (l2_test.at(i) > thresh_test) { outliers.add(i); 
sb.append(String.format("row %d : l2 error = %5f\n", i, l2_test.at(i))); } } Log.info(sb); // check that the all outliers are found (and nothing else) Assert.assertTrue(outliers.contains(new Long(20))); Assert.assertTrue(outliers.contains(new Long(21))); Assert.assertTrue(outliers.contains(new Long(22))); Assert.assertTrue(outliers.size() == 3); // cleanup p.delete(); mymodel.delete(); train.delete(); l2_frame_train.delete(); test.delete(); l2_frame_test.delete(); } }
src/test/java/hex/DeepLearningAutoEncoderTest.java
package hex; import hex.deeplearning.*; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import water.*; import water.api.QuantilesPage; import water.exec.Env; import water.exec.Exec2; import water.fvec.*; import water.util.FrameUtils; import water.util.Log; import water.util.MRUtils; import java.util.HashSet; public class DeepLearningAutoEncoderTest extends TestUtil { /* Visualize outliers with the following R code (from smalldata/anomaly dir): train <- scan("ecg_discord_train.csv", sep=",") test <- scan("ecg_discord_test.csv", sep=",") plot.ts(train) plot.ts(test) */ static final String PATH = "smalldata/anomaly/ecg_discord_train.csv"; //first 20 points static final String PATH2 = "smalldata/anomaly/ecg_discord_test.csv"; //first 22 points @BeforeClass public static void stall() { stall_till_cloudsize(JUnitRunnerDebug.NODES); } @Test public void run() { long seed = 0xDECAF; Key file_train = NFSFileVec.make(find_test_file(PATH)); Frame train = ParseDataset2.parse(Key.make(), new Key[]{file_train}); Key file_test = NFSFileVec.make(find_test_file(PATH2)); Frame test = ParseDataset2.parse(Key.make(), new Key[]{file_test}); DeepLearning p = new DeepLearning(); p.source = train; p.autoencoder = true; p.response = train.vecs()[0]; //ignored anyway p.classification = false; p.seed = seed; p.hidden = new int[]{20}; p.adaptive_rate = true; p.l1 = 1e-4; // p.l2 = 1e-4; // p.rate = 1e-5; p.activation = DeepLearning.Activation.Tanh; p.loss = DeepLearning.Loss.MeanSquare; // p.initial_weight_distribution = DeepLearning.InitialWeightDistribution.Normal; // p.initial_weight_scale = 1e-3; p.epochs = 100; // p.shuffle_training_data = true; p.force_load_balance = false; p.invoke(); DeepLearningModel mymodel = UKV.get(p.dest()); // Verification of results StringBuilder sb = new StringBuilder(); sb.append("Verifying results."); // Training data // Reconstruct data using the same helper functions and verify that self-reported MSE agrees double quantile = 
0.95; final Frame l2_frame_train = mymodel.scoreAutoEncoder(train); final Vec l2_train = l2_frame_train.anyVec(); double thresh_train = mymodel.calcOutlierThreshold(l2_train, quantile); sb.append("Mean reconstruction error: " + l2_train.mean() + "\n"); Assert.assertEquals(mymodel.mse(), l2_train.mean(), 1e-6); // manually compute L2 Frame reconstr = mymodel.score(train); double mean_l2 = 0; for (int r=0; r<reconstr.numRows(); ++r) { double my_l2 = 0; for (int c = 0; c < reconstr.numCols(); ++c) { my_l2 += Math.pow((reconstr.vec(c).at(r) - train.vec(c).at(r)) * mymodel.model_info().data_info()._normMul[c], 2); } mean_l2 += my_l2; } mean_l2 /= reconstr.numRows(); reconstr.delete(); sb.append("Mean reconstruction error (train): " + l2_train.mean() + "\n"); Assert.assertEquals(mymodel.mse(), mean_l2, 1e-6); // print stats and potential outliers sb.append("The following training points are reconstructed with an error above the " + quantile*100 + "-th percentile - check for \"goodness\" of training data.\n"); for( long i=0; i<l2_train.length(); i++ ) { if (l2_train.at(i) > thresh_train) { sb.append(String.format("row %d : l2_train error = %5f\n", i, l2_train.at(i))); } } // Test data // Reconstruct data using the same helper functions and verify that self-reported MSE agrees final Frame l2_frame_test = mymodel.scoreAutoEncoder(test); final Vec l2_test = l2_frame_test.anyVec(); double mult = 10; double thresh_test = mult*thresh_train; sb.append("\nFinding outliers.\n"); sb.append("Mean reconstruction error (test): " + l2_test.mean() + "\n"); // print stats and potential outliers sb.append("The following test points are reconstructed with an error greater than " + mult + " times the mean reconstruction error of the training data:\n"); HashSet<Long> outliers = new HashSet<Long>(); for( long i=0; i<l2_test.length(); i++ ) { if (l2_test.at(i) > thresh_test) { outliers.add(i); sb.append(String.format("row %d : l2 error = %5f\n", i, l2_test.at(i))); } } Log.info(sb); // check 
that the all outliers are found (and nothing else) Assert.assertTrue(outliers.contains(new Long(20))); Assert.assertTrue(outliers.contains(new Long(21))); Assert.assertTrue(outliers.contains(new Long(22))); Assert.assertTrue(outliers.size() == 3); // cleanup p.delete(); mymodel.delete(); train.delete(); l2_frame_train.delete(); test.delete(); l2_frame_test.delete(); } }
PUB-603: Update JUnit test of auto-encoder on numerical data to latest changes.
src/test/java/hex/DeepLearningAutoEncoderTest.java
PUB-603: Update JUnit test of auto-encoder on numerical data to latest changes.
Java
apache-2.0
c0e01b12476a5821dfb827768376ee02cad265ca
0
wanggc/mongo-java-driver,jyemin/mongo-java-driver,davydotcom/mongo-java-driver,PSCGroup/mongo-java-driver,kay-kim/mongo-java-driver,rozza/mongo-java-driver,kevinsawicki/mongo-java-driver,wanggc/mongo-java-driver,gianpaj/mongo-java-driver,jyemin/mongo-java-driver,davydotcom/mongo-java-driver,rozza/mongo-java-driver,jsonking/mongo-java-driver,kevinsawicki/mongo-java-driver,jsonking/mongo-java-driver,kevinsawicki/mongo-java-driver
// DBCursor.java /** * Copyright (C) 2008 10gen Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mongodb; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Set; /** An iterator over database results. * Doing a <code>find()</code> query on a collection returns a * <code>DBCursor</code> thus * * <blockquote><pre> * DBCursor cursor = collection.find( query ); * if( cursor.hasNext() ) * DBObject obj = cursor.next(); * </pre></blockquote> * * <p><b>Warning:</b> Calling <code>toArray</code> or <code>length</code> on * a DBCursor will irrevocably turn it into an array. This * means that, if the cursor was iterating over ten million results * (which it was lazily fetching from the database), suddenly there will * be a ten-million element array in memory. Before converting to an array, * make sure that there are a reasonable number of results using * <code>skip()</code> and <code>limit()</code>. 
* <p>For example, to get an array of the 1000-1100th elements of a cursor, use * * <blockquote><pre> * List<DBObject> obj = collection.find( query ).skip( 1000 ).limit( 100 ).toArray(); * </pre></blockquote> * * @dochub cursors */ public class DBCursor implements Iterator<DBObject> , Iterable<DBObject> { /** * Initializes a new database cursor * @param collection collection to use * @param q query to perform * @param k keys to return from the query */ public DBCursor( DBCollection collection , DBObject q , DBObject k ){ _collection = collection; _query = q == null ? new BasicDBObject() : q; _keysWanted = k; } /** Types of cursors: iterator or array. */ static enum CursorType { ITERATOR , ARRAY }; /** * Creates a copy of an existing database cursor. * The new cursor is an iterator, even if the original * was an array. * * @return the new cursor */ public DBCursor copy() { DBCursor c = new DBCursor(_collection, _query, _keysWanted); c._orderBy = _orderBy; c._hint = _hint; c._numWanted = _numWanted; c._skip = _skip; c._options = _options; return c; } public Iterator<DBObject> iterator(){ return this.copy(); } // ---- querty modifiers -------- /** * Sorts this cursor's elements. * @param orderBy the fields on which to sort * @return a cursor pointing to the first element of the sorted results */ public DBCursor sort( DBObject orderBy ){ if ( _it != null ) throw new IllegalStateException( "can't sort after executing query" ); _orderBy = orderBy; return this; } /** * Informs the database of indexed fields of the collection in order to improve performance. * @param indexKeys an <code>DBObject</code> with index names as keys * @return same DBCursor for chaining operations */ public DBCursor hint( DBObject indexKeys ){ if ( _it != null ) throw new IllegalStateException( "can't hint after executing query" ); if ( indexKeys == null ) _hint = null; else _hint = DBCollection.genIndexName( indexKeys ); return this; } /** * Use snapshot mode for the query. 
Snapshot mode assures no duplicates are * returned, or objects missed, which were present at both the start and end * of the query's execution (if an object is new during the query, or deleted * during the query, it may or may not be returned, even with snapshot mode). * Note that short query responses (less than 1MB) are always effectively snapshotted. * Currently, snapshot mode may not be used with sorting or explicit hints. * @return same DBCursor for chaining operations */ public DBCursor snapshot() { if (_it != null) throw new IllegalStateException("can't snapshot after executing the query"); _snapshot = true; return this; } /** * Informs the database of an indexed field of the collection in order to improve performance. * @param indexName the name of an index * @return same DBCursort for chaining operations */ public DBCursor hint( String indexName ){ if ( _it != null ) throw new IllegalStateException( "can't hint after executing query" ); _hint = indexName; return this; } /** * Returns an object containing basic information about the * exectution of the query that created this cursor * This creates a <code>DBObject</code> with the key/value pairs: * "cursor" : cursor type * "nScanned" : number of records examined by the database for this query * "n" : the number of records that the database returned * "millis" : how long it took the database to execute the query * @return a <code>DBObject</code> * @dochub explain */ public DBObject explain(){ DBCursor c = copy(); c._explain = true; c._numWanted = c._numWanted * -1; return c.next(); } /** * Limits the number of elements returned. 
* @param n the number of elements to return * @return a cursor pointing to the first element of the limited results * @dochub limit */ public DBCursor limit( int n ){ if ( _it != null ) throw new IllegalStateException( "can't set limit after executing query" ); _numWanted = n; return this; } /** * Limits the number of elements returned in one batch * @param n the number of elements to return in a batch */ public DBCursor batchSize( int n ){ if ( n < 1 ) throw new IllegalArgumentException( "batchSize has to be >= 1" ); if ( n == 1 ) n = 2; if ( _it != null ) throw new IllegalStateException( "can't set batch size after executing query" ); _batchSize = n; return this; } /** * Discards a given number of elements at the beginning of the cursor. * @param n the number of elements to skip * @return a cursor pointing to the new first element of the results * @throws RuntimeException if the cursor has started to be iterated through */ public DBCursor skip( int n ){ if ( _it != null ) throw new IllegalStateException( "can't set skip after executing query" ); _skip = n; return this; } /** * adds an option - see Bytes.QUERYOPTION_* for list */ public DBCursor addOption( int option ){ _options |= option; return this; } // ---- internal stuff ------ private void _check() throws MongoException { if ( _it != null ) return; if ( _collection != null && _query != null ){ _lookForHints(); DBObject foo = _query; if ( hasSpecialQueryFields() ){ foo = new BasicDBObject(); _addToQueryObject( foo , "query" , _query , true ); _addToQueryObject( foo , "orderby" , _orderBy , false ); _addToQueryObject( foo , "$hint" , _hint ); if ( _explain ) foo.put( "$explain" , true ); if ( _snapshot ) foo.put( "$snapshot", true ); } int bs = _numWanted; if ( _batchSize > 0 ){ if ( _numWanted == 0 ) bs = _batchSize; else bs = Math.min( bs , _batchSize ); } _it = _collection.find( foo , _keysWanted , _skip , bs , _options ); } if ( _it == null ){ _it = (new LinkedList<DBObject>()).iterator(); _fake = true; } 
} /** * if there is a hint to use, use it */ private void _lookForHints(){ if ( _hint != null ) // if someone set a hint, then don't do this return; if ( _collection._hintFields == null ) return; Set<String> mykeys = _query.keySet(); for ( DBObject o : _collection._hintFields ){ Set<String> hintKeys = o.keySet(); if ( ! mykeys.containsAll( hintKeys ) ) continue; hint( o ); return; } } boolean hasSpecialQueryFields(){ if ( _orderBy != null && _orderBy.keySet().size() > 0 ) return true; if ( _hint != null ) return true; return _explain; } void _addToQueryObject( DBObject query , String field , DBObject thing , boolean sendEmpty ){ if ( thing == null ) return; if ( ! sendEmpty && thing.keySet().size() == 0 ) return; _addToQueryObject( query , field , thing ); } void _addToQueryObject( DBObject query , String field , Object thing ){ if ( thing == null ) return; query.put( field , thing ); } void _checkType( CursorType type ){ if ( _cursorType == null ){ _cursorType = type; return; } if ( type == _cursorType ) return; throw new IllegalArgumentException( "can't switch cursor access methods" ); } private DBObject _next() throws MongoException { if ( _cursorType == null ) _checkType( CursorType.ITERATOR ); _check(); _cur = null; _cur = _it.next(); _collection.apply( _cur , false ); _num++; if ( _keysWanted != null && _keysWanted.keySet().size() > 0 ){ _cur.markAsPartialObject(); //throw new UnsupportedOperationException( "need to figure out partial" ); } if ( _cursorType == CursorType.ARRAY ){ _nums.add( String.valueOf( _all.size() ) ); _all.add( _cur ); } return _cur; } public int numGetMores(){ if ( _fake ) return 0; if ( _it instanceof DBApiLayer.Result ) return ((DBApiLayer.Result)_it).numGetMores(); throw new IllegalArgumentException("_it not a real result" ); } public List<Integer> getSizes(){ if ( _fake ) return new LinkedList<Integer>(); if ( _it instanceof DBApiLayer.Result ) return ((DBApiLayer.Result)_it).getSizes(); throw new IllegalArgumentException("_it not a 
real result" ); } private boolean _hasNext() throws MongoException { _check(); if ( _numWanted > 0 && _num >= _numWanted ) return false; return _it.hasNext(); } /** * Returns the number of objects through which the cursor has iterated. * @return the number of objects seen */ public int numSeen(){ return _num; } // ----- iterator api ----- /** * Checks if there is another element. * @return if there is another element */ public boolean hasNext(){ _checkType( CursorType.ITERATOR ); try { return _hasNext(); } catch ( MongoException e ){ throw new MongoInternalException( "couldn't get next element" , e ); } } /** * Returns the element the cursor is at and moves the cursor ahead by one. * @return the next element */ public DBObject next(){ _checkType( CursorType.ITERATOR ); try { return _next(); } catch ( MongoException e ){ throw new MongoInternalException( "couldn't get next element" , e ); } } /** * Returns the element the cursor is at. * @return the next element */ public DBObject curr(){ _checkType( CursorType.ITERATOR ); return _cur; } /** * Unimplemented. */ public void remove(){ throw new UnsupportedOperationException( "can't remove from a cursor" ); } // ---- array api ----- void _fill( int n ) throws MongoException { _checkType( CursorType.ARRAY ); while ( n >= _all.size() && _hasNext() ) _next(); } /** Finds the number of elements in the array. * @return the number of elements in the array */ public int length() throws MongoException { _checkType( CursorType.ARRAY ); _fill( Integer.MAX_VALUE ); return _all.size(); } /** Converts this cursor to an array. * @return an array of elements */ public List<DBObject> toArray() throws MongoException { return toArray( Integer.MAX_VALUE ); } /** * Converts this cursor to an array. If there are more than a given number of elements in the resulting array, only return the first <tt>min</tt>. 
* @param min the minimum size of the array to return * @return an array of elements */ public List<DBObject> toArray( int min ) throws MongoException { _checkType( CursorType.ARRAY ); _fill( min ); return _all; } /** * for testing only! * iterates cursor and counts objects * @return num objects */ public int itcount(){ int n = 0; while ( this.hasNext() ){ this.next(); n++; } return n; } /** * Counts the number of elements in this cursor. * @return the number of elements */ public int count() throws MongoException { if ( _collection == null ) throw new IllegalArgumentException( "why is _collection null" ); if ( _collection._db == null ) throw new IllegalArgumentException( "why is _collection._db null" ); return (int)_collection.getCount(this._query, this._keysWanted); } public DBObject getKeysWanted(){ return _keysWanted; } public DBObject getQuery(){ return _query; } // ---- query setup ---- private final DBCollection _collection; private final DBObject _query; private final DBObject _keysWanted; private DBObject _orderBy = null; private String _hint = null; private boolean _explain = false; private int _numWanted = 0; private int _batchSize = 0; private int _skip = 0; private boolean _snapshot = false; private int _options = 0; // ---- result info ---- private Iterator<DBObject> _it = null; private boolean _fake = false; private CursorType _cursorType = null; private DBObject _cur = null; private int _num = 0; private final ArrayList<DBObject> _all = new ArrayList<DBObject>(); private final List<String> _nums = new ArrayList<String>(); }
src/main/com/mongodb/DBCursor.java
// DBCursor.java /** * Copyright (C) 2008 10gen Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mongodb; import java.util.Iterator; import java.util.LinkedList; import java.util.Set; import java.util.List; import java.util.ArrayList; import java.util.Collections; /** An iterator over database results. * Doing a <code>find()</code> query on a collection returns a * <code>DBCursor</code> thus * * <blockquote><pre> * DBCursor cursor = collection.find( query ); * if( cursor.hasNext() ) * DBObject obj = cursor.next(); * </pre></blockquote> * * <p><b>Warning:</b> Calling <code>toArray</code> or <code>length</code> on * a DBCursor will irrevocably turn it into an array. This * means that, if the cursor was iterating over ten million results * (which it was lazily fetching from the database), suddenly there will * be a ten-million element array in memory. Before converting to an array, * make sure that there are a reasonable number of results using * <code>skip()</code> and <code>limit()</code>. 
* <p>For example, to get an array of the 1000-1100th elements of a cursor, use * * <blockquote><pre> * List<DBObject> obj = collection.find( query ).skip( 1000 ).limit( 100 ).toArray(); * </pre></blockquote> * * @dochub cursors */ public class DBCursor implements Iterator<DBObject> , Iterable<DBObject> { /** * Initializes a new database cursor * @param collection collection to use * @param q query to perform * @param k keys to return from the query */ public DBCursor( DBCollection collection , DBObject q , DBObject k ){ _collection = collection; _query = q == null ? new BasicDBObject() : q; _keysWanted = k; } /** Types of cursors: iterator or array. */ static enum CursorType { ITERATOR , ARRAY }; /** * Creates a copy of an existing database cursor. * The new cursor is an iterator, even if the original * was an array. * * @return the new cursor */ public DBCursor copy() { DBCursor c = new DBCursor(_collection, _query, _keysWanted); c._orderBy = _orderBy; c._hint = _hint; c._numWanted = _numWanted; c._skip = _skip; c._options = _options; return c; } public Iterator<DBObject> iterator(){ return this.copy(); } // ---- querty modifiers -------- /** * Sorts this cursor's elements. * @param orderBy the fields on which to sort * @return a cursor pointing to the first element of the sorted results */ public DBCursor sort( DBObject orderBy ){ if ( _it != null ) throw new IllegalStateException( "can't sort after executing query" ); _orderBy = orderBy; return this; } /** * Informs the database of indexed fields of the collection in order to improve performance. * @param indexKeys an <code>DBObject</code> with index names as keys * @return same DBCursor for chaining operations */ public DBCursor hint( DBObject indexKeys ){ if ( _it != null ) throw new IllegalStateException( "can't hint after executing query" ); if ( indexKeys == null ) _hint = null; else _hint = DBCollection.genIndexName( indexKeys ); return this; } /** * Use snapshot mode for the query. 
Snapshot mode assures no duplicates are * returned, or objects missed, which were present at both the start and end * of the query's execution (if an object is new during the query, or deleted * during the query, it may or may not be returned, even with snapshot mode). * Note that short query responses (less than 1MB) are always effectively snapshotted. * Currently, snapshot mode may not be used with sorting or explicit hints. * @return same DBCursor for chaining operations */ public DBCursor snapshot() { if (_it != null) throw new IllegalStateException("can't snapshot after executing the query"); _snapshot = true; return this; } /** * Informs the database of an indexed field of the collection in order to improve performance. * @param indexName the name of an index * @return same DBCursort for chaining operations */ public DBCursor hint( String indexName ){ if ( _it != null ) throw new IllegalStateException( "can't hint after executing query" ); _hint = indexName; return this; } /** * Returns an object containing basic information about the * exectution of the query that created this cursor * This creates a <code>DBObject</code> with the key/value pairs: * "cursor" : cursor type * "nScanned" : number of records examined by the database for this query * "n" : the number of records that the database returned * "millis" : how long it took the database to execute the query * @return a <code>DBObject</code> * @dochub explain */ public DBObject explain(){ DBCursor c = copy(); c._explain = true; c._numWanted = c._numWanted * -1; return c.next(); } /** * Limits the number of elements returned. 
* @param n the number of elements to return * @return a cursor pointing to the first element of the limited results * @dochub limit */ public DBCursor limit( int n ){ if ( _it != null ) throw new IllegalStateException( "can't set limit after executing query" ); _numWanted = n; return this; } /** * Limits the number of elements returned in one batch * @param n the number of elements to return in a batch */ public DBCursor batchSize( int n ){ if ( n < 1 ) throw new IllegalArgumentException( "batchSize has to be >= 1" ); if ( n == 1 ) n = 2; if ( _it != null ) throw new IllegalStateException( "can't set batch size after executing query" ); _batchSize = n; return this; } /** * Discards a given number of elements at the beginning of the cursor. * @param n the number of elements to skip * @return a cursor pointing to the new first element of the results * @throws RuntimeException if the cursor has started to be iterated through */ public DBCursor skip( int n ){ if ( _it != null ) throw new IllegalStateException( "can't set skip after executing query" ); _skip = n; return this; } /** * adds an option - see Bytes.QUERYOPTION_* for list */ public DBCursor addOption( int option ){ _options |= option; return this; } // ---- internal stuff ------ private void _check() throws MongoException { if ( _it != null ) return; if ( _collection != null && _query != null ){ _lookForHints(); DBObject foo = _query; if ( hasSpecialQueryFields() ){ foo = new BasicDBObject(); _addToQueryObject( foo , "query" , _query , true ); _addToQueryObject( foo , "orderby" , _orderBy , false ); _addToQueryObject( foo , "$hint" , _hint ); if ( _explain ) foo.put( "$explain" , true ); if ( _snapshot ) foo.put( "$snapshot", true ); } int bs = _numWanted; if ( _batchSize > 0 ){ if ( _numWanted == 0 ) bs = _batchSize; else bs = Math.min( bs , _batchSize ); } _it = _collection.find( foo , _keysWanted , _skip , bs , _options ); } if ( _it == null ){ _it = (new LinkedList<DBObject>()).iterator(); _fake = true; } 
} /** * if there is a hint to use, use it */ private void _lookForHints(){ if ( _hint != null ) // if someone set a hint, then don't do this return; if ( _collection._hintFields == null ) return; Set<String> mykeys = _query.keySet(); for ( DBObject o : _collection._hintFields ){ Set<String> hintKeys = o.keySet(); if ( ! mykeys.containsAll( hintKeys ) ) continue; hint( o ); return; } } boolean hasSpecialQueryFields(){ if ( _orderBy != null && _orderBy.keySet().size() > 0 ) return true; if ( _hint != null ) return true; return _explain; } void _addToQueryObject( DBObject query , String field , DBObject thing , boolean sendEmpty ){ if ( thing == null ) return; if ( ! sendEmpty && thing.keySet().size() == 0 ) return; _addToQueryObject( query , field , thing ); } void _addToQueryObject( DBObject query , String field , Object thing ){ if ( thing == null ) return; query.put( field , thing ); } void _checkType( CursorType type ){ if ( _cursorType == null ){ _cursorType = type; return; } if ( type == _cursorType ) return; throw new IllegalArgumentException( "can't switch cursor access methods" ); } private DBObject _next() throws MongoException { if ( _cursorType == null ) _checkType( CursorType.ITERATOR ); _check(); _cur = null; _cur = _it.next(); _collection.apply( _cur , false ); _num++; if ( _keysWanted != null && _keysWanted.keySet().size() > 0 ){ _cur.markAsPartialObject(); //throw new UnsupportedOperationException( "need to figure out partial" ); } if ( _cursorType == CursorType.ARRAY ){ _nums.add( String.valueOf( _all.size() ) ); _all.add( _cur ); } return _cur; } public int numGetMores(){ if ( _fake ) return 0; if ( _it instanceof DBApiLayer.Result ) return ((DBApiLayer.Result)_it).numGetMores(); throw new IllegalArgumentException("_it not a real result" ); } public List<Integer> getSizes(){ if ( _fake ) return new LinkedList<Integer>(); if ( _it instanceof DBApiLayer.Result ) return ((DBApiLayer.Result)_it).getSizes(); throw new IllegalArgumentException("_it not a 
real result" ); } private boolean _hasNext() throws MongoException { _check(); if ( _numWanted > 0 && _num >= _numWanted ) return false; return _it.hasNext(); } /** * Returns the number of objects through which the cursor has iterated. * @return the number of objects seen */ public int numSeen(){ return _num; } // ----- iterator api ----- /** * Checks if there is another element. * @return if there is another element */ public boolean hasNext(){ _checkType( CursorType.ITERATOR ); try { return _hasNext(); } catch ( MongoException e ){ throw new MongoInternalException( "couldn't get next element" , e ); } } /** * Returns the element the cursor is at and moves the cursor ahead by one. * @return the next element */ public DBObject next(){ _checkType( CursorType.ITERATOR ); try { return _next(); } catch ( MongoException e ){ throw new MongoInternalException( "couldn't get next element" , e ); } } /** * Returns the element the cursor is at. * @return the next element */ public DBObject curr(){ _checkType( CursorType.ITERATOR ); return _cur; } /** * Unimplemented. */ public void remove(){ throw new UnsupportedOperationException( "can't remove from a cursor" ); } // ---- array api ----- void _fill( int n ) throws MongoException { _checkType( CursorType.ARRAY ); while ( n >= _all.size() && _hasNext() ) _next(); } /** Finds the number of elements in the array. * @return the number of elements in the array */ public int length() throws MongoException { _checkType( CursorType.ARRAY ); _fill( Integer.MAX_VALUE ); return _all.size(); } /** Converts this cursor to an array. * @return an array of elements */ public List<DBObject> toArray() throws MongoException { return toArray( Integer.MAX_VALUE ); } /** * Converts this cursor to an array. If there are more than a given number of elements in the resulting array, only return the first <tt>min</tt>. 
* @param min the minimum size of the array to return * @return an array of elements */ public List<DBObject> toArray( int min ) throws MongoException { _checkType( CursorType.ARRAY ); _fill( min ); return _all; } /** * for testing only! * iterates cursor and counts objects * @return num objects */ public int itcount(){ int n = 0; while ( this.hasNext() ){ this.next(); n++; } return n; } /** * Counts the number of elements in this cursor. * @return the number of elements */ public int count() throws MongoException { if ( _collection == null ) throw new IllegalArgumentException( "why is _collection null" ); if ( _collection._db == null ) throw new IllegalArgumentException( "why is _collection._db null" ); return (int)_collection.getCount(this._query, this._keysWanted); } public DBObject getKeysWanted(){ return _keysWanted; } // ---- query setup ---- final DBCollection _collection; final DBObject _query; final DBObject _keysWanted; private DBObject _orderBy = null; private String _hint = null; private boolean _explain = false; private int _numWanted = 0; private int _batchSize = 0; private int _skip = 0; private boolean _snapshot = false; private int _options = 0; // ---- result info ---- private Iterator<DBObject> _it = null; private boolean _fake = false; private CursorType _cursorType = null; private DBObject _cur = null; private int _num = 0; private final ArrayList<DBObject> _all = new ArrayList<DBObject>(); private final List<String> _nums = new ArrayList<String>(); }
Added getQuery method and set protection level to private for other fields
src/main/com/mongodb/DBCursor.java
Added getQuery method and set protection level to private for other fields
Java
apache-2.0
93d4678cd14f5a742ef2564acaaa0e87f49c75b0
0
Neoskai/greycat,electricalwind/greycat,datathings/greycat,electricalwind/greycat,Neoskai/greycat,datathings/greycat,Neoskai/greycat,electricalwind/greycat,electricalwind/greycat,Neoskai/greycat,Neoskai/greycat,datathings/greycat,Neoskai/greycat,electricalwind/greycat,datathings/greycat,electricalwind/greycat,datathings/greycat,datathings/greycat
/** * Copyright 2017 The GreyCat Authors. All rights reserved. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package greycat.ml.profiling; import greycat.Type; import greycat.struct.DMatrix; import greycat.struct.DoubleArray; import greycat.struct.ENode; import greycat.struct.matrix.MatrixOps; import greycat.struct.matrix.VolatileDMatrix; public class GaussianENode { //Getters and setters public final static String NAME = "GaussianENode"; private ENode backend; //can be used for normalization private double[] avg = null; private double[] std = null; private DMatrix cov = null; public GaussianENode(ENode backend) { if (backend == null) { throw new RuntimeException("backend can't be null for Gaussian node!"); } this.backend = backend; } public void setPrecisions(double[] precisions) { ((DoubleArray) backend.getOrCreate(Gaussian.PRECISIONS, Type.DOUBLE_ARRAY)).initWith(precisions); } public void learn(double[] values) { int features = values.length; int total = backend.getWithDefault(Gaussian.TOTAL, 0); //Create dirac only save total and sum if (total == 0) { double[] sum = new double[features]; System.arraycopy(values, 0, sum, 0, features); total = 1; backend.set(Gaussian.TOTAL, Type.INT, total); ((DoubleArray) backend.getOrCreate(Gaussian.SUM, Type.DOUBLE_ARRAY)).initWith(sum); //set total, weight, sum, return } else { DoubleArray sum; DoubleArray min = (DoubleArray) backend.getOrCreate(Gaussian.MIN, Type.DOUBLE_ARRAY); DoubleArray max = (DoubleArray) 
backend.getOrCreate(Gaussian.MAX, Type.DOUBLE_ARRAY); DoubleArray sumsquares = (DoubleArray) backend.getOrCreate(Gaussian.SUMSQ, Type.DOUBLE_ARRAY); sum = (DoubleArray) backend.get(Gaussian.SUM); if (features != sum.size()) { throw new RuntimeException("Input dimensions have changed!"); } //Upgrade dirac to gaussian if (total == 1) { //Create getMin, getMax, sumsquares double[] sumex = sum.extract(); min.initWith(sumex); max.initWith(sumex); sumsquares.init(features * (features + 1) / 2); int count = 0; for (int i = 0; i < features; i++) { for (int j = i; j < features; j++) { sumsquares.set(count, sumex[i] * sumex[j]); count++; } } } //Update the values for (int i = 0; i < features; i++) { if (values[i] < min.get(i)) { min.set(i, values[i]); } if (values[i] > max.get(i)) { max.set(i, values[i]); } sum.set(i, sum.get(i) + values[i]); } int count = 0; for (int i = 0; i < features; i++) { for (int j = i; j < features; j++) { sumsquares.set(count, sumsquares.get(count) + values[i] * values[j]); count++; } } total++; //Store everything backend.set(Gaussian.TOTAL, Type.INT, total); } // set all cached avg, std, and cov arrays to null invalidate(); } private void invalidate() { avg = null; std = null; cov = null; } private boolean initAvg() { if (avg != null) { return true; } int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total != 0) { double[] sum = ((DoubleArray) backend.get(Gaussian.SUM)).extract(); avg = new double[sum.length]; for (int i = 0; i < sum.length; i++) { avg[i] = sum[i] / total; } return true; } else { return false; } } private boolean initStd() { if (std != null) { return true; } int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total >= 2) { initAvg(); int dim = avg.length; double[] err = backend.getWithDefault(Gaussian.PRECISIONS, new double[avg.length]); double[] sumsq = getSumSq(); std = new double[dim]; double correction = total; correction = correction / (total - 1); int count = 0; for (int i = 0; i < dim; i++) { std[i] = 
Math.sqrt((sumsq[count] / total - avg[i] * avg[i]) * correction); count += (dim - i); if (std[i] < err[i]) { std[i] = err[i]; } } return true; } else { return false; } } private boolean initCov() { if (cov != null) { return true; } int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total >= 2) { initAvg(); int dim = avg.length; DoubleArray gp = (DoubleArray) backend.get(Gaussian.PRECISIONS); double[] err; if (gp != null) { err = gp.extract(); } else { err = new double[avg.length]; } for (int i = 0; i < err.length; i++) { err[i] = err[i] * err[i]; } double[] sumsq = getSumSq(); double[] covariances = new double[dim * dim]; double correction = total; correction = correction / (total - 1); int count = 0; for (int i = 0; i < dim; i++) { for (int j = i; j < dim; j++) { covariances[i * dim + j] = (sumsq[count] / total - avg[i] * avg[j]) * correction; covariances[j * dim + i] = covariances[i * dim + j]; count++; } if (covariances[i * dim + i] < err[i]) { covariances[i * dim + i] = err[i]; } } cov = VolatileDMatrix.wrap(covariances, dim, dim); return true; } else { return false; } } public double[] getAvg() { if (!initAvg()) { return null; } double[] tempAvg = new double[avg.length]; System.arraycopy(avg, 0, tempAvg, 0, avg.length); return tempAvg; } public double[] getSTD() { if (!initStd()) { return null; } double[] tempStd = new double[std.length]; System.arraycopy(std, 0, tempStd, 0, std.length); return tempStd; } public DMatrix getCovariance() { if (!initCov()) { return null; } VolatileDMatrix covtemp = VolatileDMatrix.empty(cov.rows(), cov.columns()); MatrixOps.copy(cov, covtemp); return covtemp; } public DMatrix getPearson() { if (!initCov()) { return null; } VolatileDMatrix covtemp = VolatileDMatrix.empty(cov.rows(), cov.columns()); for (int i = 0; i < covtemp.rows(); i++) { for (int j = 0; j < covtemp.columns(); j++) { if (cov.get(i, i) != 0 && cov.get(j, j) != 0) { covtemp.set(i, j, (cov.get(i, j) / Math.sqrt(cov.get(i, i) * cov.get(j, j)))); } } } return 
covtemp; } public double[] getSum() { int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total != 0) { return ((DoubleArray) backend.get(Gaussian.SUM)).extract(); } else { return null; } } public double[] getSumSq() { int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total == 0) { return null; } if (total == 1) { double[] sum = ((DoubleArray) backend.get(Gaussian.SUM)).extract(); int features = sum.length; double[] sumsquares = new double[features * (features + 1) / 2]; int count = 0; for (int i = 0; i < features; i++) { for (int j = i; j < features; j++) { sumsquares[count] = sum[i] * sum[j]; count++; } } return sumsquares; } else { return ((DoubleArray) backend.get(Gaussian.SUMSQ)).extract(); } } public double[] getMin() { int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total == 0) { return null; } if (total == 1) { return ((DoubleArray) backend.get(Gaussian.SUM)).extract(); } else { return ((DoubleArray) backend.get(Gaussian.MIN)).extract(); } } public double[] getMax() { int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total == 0) { return null; } if (total == 1) { return ((DoubleArray) backend.get(Gaussian.SUM)).extract(); } else { return ((DoubleArray) backend.get(Gaussian.MAX)).extract(); } } public int getTotal() { return backend.getWithDefault(Gaussian.TOTAL, 0); } public int getDimensions() { int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total != 0) { return ((DoubleArray) backend.get(Gaussian.SUM)).size(); } else { return 0; } } }
plugins/ml/src/main/java/greycat/ml/profiling/GaussianENode.java
/** * Copyright 2017 The GreyCat Authors. All rights reserved. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package greycat.ml.profiling; import greycat.Type; import greycat.struct.DMatrix; import greycat.struct.DoubleArray; import greycat.struct.ENode; import greycat.struct.matrix.MatrixOps; import greycat.struct.matrix.VolatileDMatrix; public class GaussianENode { //Getters and setters public final static String NAME = "GaussianENode"; private ENode backend; //can be used for normalization private double[] avg = null; private double[] std = null; private DMatrix cov = null; public GaussianENode(ENode backend) { if (backend == null) { throw new RuntimeException("backend can't be null for Gaussian node!"); } this.backend = backend; } public void setPrecisions(double[] precisions) { ((DoubleArray) backend.getOrCreate(Gaussian.PRECISIONS,Type.DOUBLE_ARRAY)).initWith(precisions); } public void learn(double[] values) { int features = values.length; int total = backend.getWithDefault(Gaussian.TOTAL, 0); //Create dirac only save total and sum if (total == 0) { double[] sum = new double[features]; System.arraycopy(values, 0, sum, 0, features); total = 1; backend.set(Gaussian.TOTAL, Type.INT, total); ((DoubleArray) backend.getOrCreate(Gaussian.SUM, Type.DOUBLE_ARRAY)).initWith(sum); //set total, weight, sum, return } else { DoubleArray sum; DoubleArray min = (DoubleArray) backend.getOrCreate(Gaussian.MIN, Type.DOUBLE_ARRAY); DoubleArray max = (DoubleArray) 
backend.getOrCreate(Gaussian.MAX, Type.DOUBLE_ARRAY); DoubleArray sumsquares = (DoubleArray) backend.getOrCreate(Gaussian.SUMSQ, Type.DOUBLE_ARRAY); sum = (DoubleArray) backend.get(Gaussian.SUM); if (features != sum.size()) { throw new RuntimeException("Input dimensions have changed!"); } //Upgrade dirac to gaussian if (total == 1) { //Create getMin, getMax, sumsquares double[] sumex = sum.extract(); min.initWith(sumex); max.initWith(sumex); sumsquares.init(features * (features + 1) / 2); int count = 0; for (int i = 0; i < features; i++) { for (int j = i; j < features; j++) { sumsquares.set(count, sumex[i] * sumex[j]); count++; } } } //Update the values for (int i = 0; i < features; i++) { if (values[i] < min.get(i)) { min.set(i, values[i]); } if (values[i] > max.get(i)) { max.set(i, values[i]); } sum.set(i, sum.get(i) + values[i]); } int count = 0; for (int i = 0; i < features; i++) { for (int j = i; j < features; j++) { sumsquares.set(count, sumsquares.get(count) + values[i] * values[j]); count++; } } total++; //Store everything backend.set(Gaussian.TOTAL, Type.INT, total); } // set all cached avg, std, and cov arrays to null invalidate(); } private void invalidate() { avg = null; std = null; cov = null; } private boolean initAvg() { if (avg != null) { return true; } int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total != 0) { double[] sum = ((DoubleArray)backend.get(Gaussian.SUM)).extract(); avg = new double[sum.length]; for (int i = 0; i < sum.length; i++) { avg[i] = sum[i] / total; } return true; } else { return false; } } private boolean initStd() { if (std != null) { return true; } int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total >= 2) { initAvg(); int dim = avg.length; double[] err = backend.getWithDefault(Gaussian.PRECISIONS, new double[avg.length]); double[] sumsq = getSumSq(); std = new double[dim]; double correction = total; correction = correction / (total - 1); int count = 0; for (int i = 0; i < dim; i++) { std[i] = 
Math.sqrt((sumsq[count] / total - avg[i] * avg[i]) * correction); count += (dim - i); if (std[i] < err[i]) { std[i] = err[i]; } } return true; } else { return false; } } private boolean initCov() { if (cov != null) { return true; } int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total >= 2) { initAvg(); int dim = avg.length; DoubleArray gp= (DoubleArray) backend.get(Gaussian.PRECISIONS); double[] err; if(gp!=null){ err=gp.extract(); } else { err=new double[avg.length]; } for (int i = 0; i < err.length; i++) { err[i] = err[i] * err[i]; } double[] sumsq = getSumSq(); double[] covariances = new double[dim * dim]; double correction = total; correction = correction / (total - 1); int count = 0; for (int i = 0; i < dim; i++) { for (int j = i; j < dim; j++) { covariances[i * dim + j] = (sumsq[count] / total - avg[i] * avg[j]) * correction; covariances[j * dim + i] = covariances[i * dim + j]; count++; } if (covariances[i * dim + i] < err[i]) { covariances[i * dim + i] = err[i]; } } cov = VolatileDMatrix.wrap(covariances, dim, dim); return true; } else { return false; } } public double[] getAvg() { if (!initAvg()) { return null; } double[] tempAvg = new double[avg.length]; System.arraycopy(avg, 0, tempAvg, 0, avg.length); return tempAvg; } public double[] getSTD() { if (!initStd()) { return null; } double[] tempStd = new double[std.length]; System.arraycopy(std, 0, tempStd, 0, std.length); return tempStd; } public DMatrix getCovariance() { if (!initCov()) { return null; } VolatileDMatrix covtemp = VolatileDMatrix.empty(cov.rows(), cov.columns()); MatrixOps.copy(cov, covtemp); return covtemp; } public DMatrix getPearson() { if (!initCov()) { return null; } VolatileDMatrix covtemp = VolatileDMatrix.empty(cov.rows(), cov.columns()); for(int i=0;i<covtemp.rows();i++){ for(int j=0;j<covtemp.columns();j++){ if(cov.get(i,i)!=0 && cov.get(j,j)!=0) { covtemp.set(i, j, cov.get(i, j) / (cov.get(i, i) * cov.get(j, j))); } } } return covtemp; } public double[] getSum() { int 
total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total != 0) { return ((DoubleArray) backend.get(Gaussian.SUM)).extract(); } else { return null; } } public double[] getSumSq() { int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total == 0) { return null; } if (total == 1) { double[] sum = ((DoubleArray) backend.get(Gaussian.SUM)).extract(); int features = sum.length; double[] sumsquares = new double[features * (features + 1) / 2]; int count = 0; for (int i = 0; i < features; i++) { for (int j = i; j < features; j++) { sumsquares[count] = sum[i] * sum[j]; count++; } } return sumsquares; } else { return ((DoubleArray) backend.get(Gaussian.SUMSQ)).extract(); } } public double[] getMin() { int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total == 0) { return null; } if (total == 1) { return ((DoubleArray) backend.get(Gaussian.SUM)).extract(); } else { return ((DoubleArray) backend.get(Gaussian.MIN)).extract(); } } public double[] getMax() { int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total == 0) { return null; } if (total == 1) { return ((DoubleArray) backend.get(Gaussian.SUM)).extract(); } else { return ((DoubleArray) backend.get(Gaussian.MAX)).extract(); } } public int getTotal() { return backend.getWithDefault(Gaussian.TOTAL, 0); } public int getDimensions() { int total = backend.getWithDefault(Gaussian.TOTAL, 0); if (total != 0) { return ((DoubleArray) backend.get(Gaussian.SUM)).size(); } else { return 0; } } }
refix
plugins/ml/src/main/java/greycat/ml/profiling/GaussianENode.java
refix
Java
apache-2.0
ac8d3f6d1da4fce2552628b85b5d3b10addf3379
0
nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch
import java.util.*; public class Translator { public Translator (boolean debug) { _debug = debug; } public void translate (Vector<Food> foods) { for (int i = 0; i < foods.size(); i++) { Food toCheck = foods.elementAt(i); } } private boolean _debug; }
AdventOfCode/2020/day21/Babel.java
public class Translator { public Translator (boolean debug) { _debug = debug; } public void translate () { private boolean _debug; }
Update Babel.java
AdventOfCode/2020/day21/Babel.java
Update Babel.java
Java
apache-2.0
f54bf809aaf72a094ea60ce243550cb8ed198ee0
0
alina-ipatina/pentaho-kettle,yshakhau/pentaho-kettle,DFieldFL/pentaho-kettle,eayoungs/pentaho-kettle,lgrill-pentaho/pentaho-kettle,CapeSepias/pentaho-kettle,tkafalas/pentaho-kettle,e-cuellar/pentaho-kettle,ddiroma/pentaho-kettle,matthewtckr/pentaho-kettle,stepanovdg/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,pymjer/pentaho-kettle,bmorrise/pentaho-kettle,pymjer/pentaho-kettle,MikhailHubanau/pentaho-kettle,matrix-stone/pentaho-kettle,akhayrutdinov/pentaho-kettle,denisprotopopov/pentaho-kettle,e-cuellar/pentaho-kettle,ma459006574/pentaho-kettle,e-cuellar/pentaho-kettle,HiromuHota/pentaho-kettle,SergeyTravin/pentaho-kettle,jbrant/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,yshakhau/pentaho-kettle,pavel-sakun/pentaho-kettle,hudak/pentaho-kettle,gretchiemoran/pentaho-kettle,flbrino/pentaho-kettle,emartin-pentaho/pentaho-kettle,matthewtckr/pentaho-kettle,drndos/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,ViswesvarSekar/pentaho-kettle,ddiroma/pentaho-kettle,emartin-pentaho/pentaho-kettle,flbrino/pentaho-kettle,nanata1115/pentaho-kettle,matthewtckr/pentaho-kettle,mattyb149/pentaho-kettle,e-cuellar/pentaho-kettle,stevewillcock/pentaho-kettle,YuryBY/pentaho-kettle,nantunes/pentaho-kettle,pminutillo/pentaho-kettle,roboguy/pentaho-kettle,tmcsantos/pentaho-kettle,YuryBY/pentaho-kettle,CapeSepias/pentaho-kettle,drndos/pentaho-kettle,yshakhau/pentaho-kettle,pedrofvteixeira/pentaho-kettle,matrix-stone/pentaho-kettle,pentaho/pentaho-kettle,matrix-stone/pentaho-kettle,rmansoor/pentaho-kettle,mbatchelor/pentaho-kettle,mbatchelor/pentaho-kettle,Advent51/pentaho-kettle,jbrant/pentaho-kettle,alina-ipatina/pentaho-kettle,ccaspanello/pentaho-kettle,pedrofvteixeira/pentaho-kettle,gretchiemoran/pentaho-kettle,pavel-sakun/pentaho-kettle,kurtwalker/pentaho-kettle,mkambol/pentaho-kettle,codek/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,aminmkhan/pentaho-kettle,airy-ict/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,sajeetharan/pentaho-kettle,wseyler/pentaho-kettle,lgrill-pent
aho/pentaho-kettle,marcoslarsen/pentaho-kettle,ddiroma/pentaho-kettle,mattyb149/pentaho-kettle,pentaho/pentaho-kettle,yshakhau/pentaho-kettle,ma459006574/pentaho-kettle,stepanovdg/pentaho-kettle,dkincade/pentaho-kettle,mattyb149/pentaho-kettle,MikhailHubanau/pentaho-kettle,lgrill-pentaho/pentaho-kettle,nantunes/pentaho-kettle,pymjer/pentaho-kettle,nanata1115/pentaho-kettle,jbrant/pentaho-kettle,pentaho/pentaho-kettle,wseyler/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,brosander/pentaho-kettle,wseyler/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,ccaspanello/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,YuryBY/pentaho-kettle,marcoslarsen/pentaho-kettle,nicoben/pentaho-kettle,tkafalas/pentaho-kettle,rmansoor/pentaho-kettle,Advent51/pentaho-kettle,zlcnju/kettle,CapeSepias/pentaho-kettle,flbrino/pentaho-kettle,hudak/pentaho-kettle,ivanpogodin/pentaho-kettle,stepanovdg/pentaho-kettle,pymjer/pentaho-kettle,flbrino/pentaho-kettle,tmcsantos/pentaho-kettle,kurtwalker/pentaho-kettle,GauravAshara/pentaho-kettle,mbatchelor/pentaho-kettle,eayoungs/pentaho-kettle,ccaspanello/pentaho-kettle,hudak/pentaho-kettle,mattyb149/pentaho-kettle,birdtsai/pentaho-kettle,birdtsai/pentaho-kettle,dkincade/pentaho-kettle,mdamour1976/pentaho-kettle,lgrill-pentaho/pentaho-kettle,codek/pentaho-kettle,skofra0/pentaho-kettle,bmorrise/pentaho-kettle,matthewtckr/pentaho-kettle,DFieldFL/pentaho-kettle,denisprotopopov/pentaho-kettle,aminmkhan/pentaho-kettle,graimundo/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,ViswesvarSekar/pentaho-kettle,DFieldFL/pentaho-kettle,rmansoor/pentaho-kettle,ivanpogodin/pentaho-kettle,kurtwalker/pentaho-kettle,akhayrutdinov/pentaho-kettle,EcoleKeine/pentaho-kettle,pminutillo/pentaho-kettle,kurtwalker/pentaho-kettle,TatsianaKasiankova/pentaho-kettle,stepanovdg/pentaho-kettle,gretchiemoran/pentaho-kettle,graimundo/pentaho-kettle,marcoslarsen/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,skofra0/pentaho-kettle,ViswesvarSekar/pentaho-kettle,nicoben/pentaho-kettle,cjs
onger/pentaho-kettle,emartin-pentaho/pentaho-kettle,airy-ict/pentaho-kettle,cjsonger/pentaho-kettle,alina-ipatina/pentaho-kettle,HiromuHota/pentaho-kettle,SergeyTravin/pentaho-kettle,stevewillcock/pentaho-kettle,dkincade/pentaho-kettle,ddiroma/pentaho-kettle,mdamour1976/pentaho-kettle,pminutillo/pentaho-kettle,mdamour1976/pentaho-kettle,mkambol/pentaho-kettle,emartin-pentaho/pentaho-kettle,mdamour1976/pentaho-kettle,HiromuHota/pentaho-kettle,ViswesvarSekar/pentaho-kettle,wseyler/pentaho-kettle,roboguy/pentaho-kettle,graimundo/pentaho-kettle,airy-ict/pentaho-kettle,nanata1115/pentaho-kettle,stevewillcock/pentaho-kettle,GauravAshara/pentaho-kettle,marcoslarsen/pentaho-kettle,cjsonger/pentaho-kettle,birdtsai/pentaho-kettle,EcoleKeine/pentaho-kettle,brosander/pentaho-kettle,akhayrutdinov/pentaho-kettle,EcoleKeine/pentaho-kettle,tkafalas/pentaho-kettle,MikhailHubanau/pentaho-kettle,denisprotopopov/pentaho-kettle,brosander/pentaho-kettle,nicoben/pentaho-kettle,drndos/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,SergeyTravin/pentaho-kettle,ivanpogodin/pentaho-kettle,tmcsantos/pentaho-kettle,mbatchelor/pentaho-kettle,nanata1115/pentaho-kettle,roboguy/pentaho-kettle,sajeetharan/pentaho-kettle,nantunes/pentaho-kettle,pedrofvteixeira/pentaho-kettle,GauravAshara/pentaho-kettle,gretchiemoran/pentaho-kettle,skofra0/pentaho-kettle,bmorrise/pentaho-kettle,HiromuHota/pentaho-kettle,pavel-sakun/pentaho-kettle,hudak/pentaho-kettle,graimundo/pentaho-kettle,sajeetharan/pentaho-kettle,aminmkhan/pentaho-kettle,eayoungs/pentaho-kettle,zlcnju/kettle,SergeyTravin/pentaho-kettle,akhayrutdinov/pentaho-kettle,ma459006574/pentaho-kettle,zlcnju/kettle,TatsianaKasiankova/pentaho-kettle,ccaspanello/pentaho-kettle,EcoleKeine/pentaho-kettle,tkafalas/pentaho-kettle,drndos/pentaho-kettle,alina-ipatina/pentaho-kettle,eayoungs/pentaho-kettle,rmansoor/pentaho-kettle,codek/pentaho-kettle,pedrofvteixeira/pentaho-kettle,denisprotopopov/pentaho-kettle,zlcnju/kettle,pentaho/pentaho-kettle,nicoben/pentaho-k
ettle,birdtsai/pentaho-kettle,aminmkhan/pentaho-kettle,dkincade/pentaho-kettle,GauravAshara/pentaho-kettle,Advent51/pentaho-kettle,nantunes/pentaho-kettle,brosander/pentaho-kettle,YuryBY/pentaho-kettle,AlexanderBuloichik/pentaho-kettle,matrix-stone/pentaho-kettle,stevewillcock/pentaho-kettle,ivanpogodin/pentaho-kettle,codek/pentaho-kettle,airy-ict/pentaho-kettle,jbrant/pentaho-kettle,sajeetharan/pentaho-kettle,DFieldFL/pentaho-kettle,pminutillo/pentaho-kettle,bmorrise/pentaho-kettle,Advent51/pentaho-kettle,roboguy/pentaho-kettle,mkambol/pentaho-kettle,skofra0/pentaho-kettle,cjsonger/pentaho-kettle,mkambol/pentaho-kettle,tmcsantos/pentaho-kettle,IvanNikolaychuk/pentaho-kettle,pavel-sakun/pentaho-kettle,ma459006574/pentaho-kettle,AliaksandrShuhayeu/pentaho-kettle,CapeSepias/pentaho-kettle
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.job.entries.movefiles; import static org.pentaho.di.job.entry.validator.AbstractFileValidator.putVariableSpace; import static org.pentaho.di.job.entry.validator.AndValidator.putValidators; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.andValidator; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.fileExistsValidator; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.notNullValidator; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.vfs.AllFileSelector; import org.apache.commons.vfs.FileObject; import org.apache.commons.vfs.FileSelectInfo; import org.apache.commons.vfs.FileType; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.Result; import org.pentaho.di.core.ResultFile; import org.pentaho.di.core.RowMetaAndData; import 
org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.Job; import org.pentaho.di.job.JobMeta; import org.pentaho.di.job.entry.JobEntryBase; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.job.entry.validator.ValidatorContext; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; /** * This defines a 'move files' job entry. * * @author Samatar Hassan * @since 25-02-2008 */ public class JobEntryMoveFiles extends JobEntryBase implements Cloneable, JobEntryInterface { private static Class<?> PKG = JobEntryMoveFiles.class; // for i18n purposes, needed by Translator2!! 
public boolean move_empty_folders; public boolean arg_from_previous; public boolean include_subfolders; public boolean add_result_filesname; public boolean destination_is_a_file; public boolean create_destination_folder; public String[] source_filefolder; public String[] destination_filefolder; public String[] wildcard; private String nr_errors_less_than; private String success_condition; public String SUCCESS_IF_AT_LEAST_X_FILES_UN_ZIPPED = "success_when_at_least"; public String SUCCESS_IF_ERRORS_LESS = "success_if_errors_less"; public String SUCCESS_IF_NO_ERRORS = "success_if_no_errors"; private boolean add_date; private boolean add_time; private boolean SpecifyFormat; private String date_time_format; private boolean AddDateBeforeExtension; private boolean DoNotKeepFolderStructure; private String iffileexists; private String destinationFolder; private String ifmovedfileexists; private String moved_date_time_format; private boolean AddMovedDateBeforeExtension; private boolean add_moved_date; private boolean add_moved_time; private boolean SpecifyMoveFormat; public boolean create_move_to_folder; public boolean simulate; int NrErrors = 0; int NrSuccess = 0; boolean successConditionBroken = false; boolean successConditionBrokenExit = false; int limitFiles = 0; public JobEntryMoveFiles( String n ) { super( n, "" ); simulate = false; create_move_to_folder = false; SpecifyMoveFormat = false; add_moved_date = false; add_moved_time = false; AddMovedDateBeforeExtension = false; moved_date_time_format = null; ifmovedfileexists = "do_nothing"; destinationFolder = null; DoNotKeepFolderStructure = false; move_empty_folders = true; arg_from_previous = false; source_filefolder = null; destination_filefolder = null; wildcard = null; include_subfolders = false; add_result_filesname = false; destination_is_a_file = false; create_destination_folder = false; nr_errors_less_than = "10"; success_condition = SUCCESS_IF_NO_ERRORS; add_date = false; add_time = false; SpecifyFormat = 
false; date_time_format = null; AddDateBeforeExtension = false; iffileexists = "do_nothing"; } public JobEntryMoveFiles() { this( "" ); } public Object clone() { JobEntryMoveFiles je = (JobEntryMoveFiles) super.clone(); return je; } public String getXML() { StringBuffer retval = new StringBuffer( 300 ); retval.append( super.getXML() ); retval.append( " " ).append( XMLHandler.addTagValue( "move_empty_folders", move_empty_folders ) ); retval.append( " " ).append( XMLHandler.addTagValue( "arg_from_previous", arg_from_previous ) ); retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", include_subfolders ) ); retval.append( " " ).append( XMLHandler.addTagValue( "add_result_filesname", add_result_filesname ) ); retval.append( " " ).append( XMLHandler.addTagValue( "destination_is_a_file", destination_is_a_file ) ); retval.append( " " ).append( XMLHandler.addTagValue( "create_destination_folder", create_destination_folder ) ); retval.append( " " ).append( XMLHandler.addTagValue( "add_date", add_date ) ); retval.append( " " ).append( XMLHandler.addTagValue( "add_time", add_time ) ); retval.append( " " ).append( XMLHandler.addTagValue( "SpecifyFormat", SpecifyFormat ) ); retval.append( " " ).append( XMLHandler.addTagValue( "date_time_format", date_time_format ) ); retval.append( " " ).append( XMLHandler.addTagValue( "nr_errors_less_than", nr_errors_less_than ) ); retval.append( " " ).append( XMLHandler.addTagValue( "success_condition", success_condition ) ); retval.append( " " ).append( XMLHandler.addTagValue( "AddDateBeforeExtension", AddDateBeforeExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "DoNotKeepFolderStructure", DoNotKeepFolderStructure ) ); retval.append( " " ).append( XMLHandler.addTagValue( "iffileexists", iffileexists ) ); retval.append( " " ).append( XMLHandler.addTagValue( "destinationFolder", destinationFolder ) ); retval.append( " " ).append( XMLHandler.addTagValue( "ifmovedfileexists", ifmovedfileexists ) ); 
    // --- tail of getXML(): serialize the remaining "move to folder" options and the
    // per-row <fields> section (the method's opening lines are above this chunk) ---
    retval.append( " " ).append( XMLHandler.addTagValue( "moved_date_time_format", moved_date_time_format ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "create_move_to_folder", create_move_to_folder ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "add_moved_date", add_moved_date ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "add_moved_time", add_moved_time ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "SpecifyMoveFormat", SpecifyMoveFormat ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "AddMovedDateBeforeExtension", AddMovedDateBeforeExtension ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "simulate", simulate ) );

    retval.append( " <fields>" ).append( Const.CR );
    if ( source_filefolder != null ) {
      // source_filefolder, destination_filefolder and wildcard are parallel arrays;
      // one <field> element is emitted per configured row.
      for ( int i = 0; i < source_filefolder.length; i++ ) {
        retval.append( " <field>" ).append( Const.CR );
        retval.append( " " ).append( XMLHandler.addTagValue( "source_filefolder", source_filefolder[i] ) );
        retval.append( " " ).append( XMLHandler.addTagValue( "destination_filefolder", destination_filefolder[i] ) );
        retval.append( " " ).append( XMLHandler.addTagValue( "wildcard", wildcard[i] ) );
        retval.append( " </field>" ).append( Const.CR );
      }
    }
    retval.append( " </fields>" ).append( Const.CR );

    return retval.toString();
  }

  /**
   * Restores this job entry's configuration from its XML node (inverse of getXML()).
   *
   * Boolean options are persisted as "Y"/"N": anything other than "Y"
   * (case-insensitive) — including a missing tag — yields false.
   *
   * @param entrynode XML node holding this entry's settings
   * @throws KettleXMLException if the node cannot be parsed
   */
  public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep,
    IMetaStore metaStore ) throws KettleXMLException {
    try {
      super.loadXML( entrynode, databases, slaveServers );
      move_empty_folders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "move_empty_folders" ) );
      arg_from_previous = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "arg_from_previous" ) );
      include_subfolders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "include_subfolders" ) );
      add_result_filesname = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_result_filesname" ) );
      destination_is_a_file = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "destination_is_a_file" ) );
      create_destination_folder = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "create_destination_folder" ) );
      add_date = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_date" ) );
      add_time = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_time" ) );
      SpecifyFormat = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "SpecifyFormat" ) );
      AddDateBeforeExtension = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "AddDateBeforeExtension" ) );
      DoNotKeepFolderStructure = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "DoNotKeepFolderStructure" ) );
      // String-valued options come back null when the tag is absent.
      date_time_format = XMLHandler.getTagValue( entrynode, "date_time_format" );
      nr_errors_less_than = XMLHandler.getTagValue( entrynode, "nr_errors_less_than" );
      success_condition = XMLHandler.getTagValue( entrynode, "success_condition" );
      iffileexists = XMLHandler.getTagValue( entrynode, "iffileexists" );
      destinationFolder = XMLHandler.getTagValue( entrynode, "destinationFolder" );
      ifmovedfileexists = XMLHandler.getTagValue( entrynode, "ifmovedfileexists" );
      moved_date_time_format = XMLHandler.getTagValue( entrynode, "moved_date_time_format" );
      AddMovedDateBeforeExtension = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "AddMovedDateBeforeExtension" ) );
      create_move_to_folder = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "create_move_to_folder" ) );
      add_moved_date = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_moved_date" ) );
      add_moved_time = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_moved_time" ) );
      SpecifyMoveFormat = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "SpecifyMoveFormat" ) );
      simulate = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "simulate" ) );

      Node fields = XMLHandler.getSubNode( entrynode, "fields" );

      // How many field arguments?
      int nrFields = XMLHandler.countNodes( fields, "field" );
      source_filefolder = new String[nrFields];
      destination_filefolder = new String[nrFields];
      wildcard = new String[nrFields];

      // Read them all...
      for ( int i = 0; i < nrFields; i++ ) {
        Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
        source_filefolder[i] = XMLHandler.getTagValue( fnode, "source_filefolder" );
        destination_filefolder[i] = XMLHandler.getTagValue( fnode, "destination_filefolder" );
        wildcard[i] = XMLHandler.getTagValue( fnode, "wildcard" );
      }
    } catch ( KettleXMLException xe ) {
      // Wrap with a localized message; the original exception is kept as cause.
      throw new KettleXMLException( BaseMessages.getString( PKG, "JobMoveFiles.Error.Exception.UnableLoadXML" ), xe );
    }
  }

  /**
   * Restores this job entry's configuration from the Kettle repository
   * (counterpart of saveRep()). Attribute names mirror the XML tag names.
   *
   * @param id_jobentry id of this entry in the repository
   * @throws KettleException if the repository read fails
   */
  public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
    try {
      move_empty_folders = rep.getJobEntryAttributeBoolean( id_jobentry, "move_empty_folders" );
      arg_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
      include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );
      add_result_filesname = rep.getJobEntryAttributeBoolean( id_jobentry, "add_result_filesname" );
      destination_is_a_file = rep.getJobEntryAttributeBoolean( id_jobentry, "destination_is_a_file" );
      create_destination_folder = rep.getJobEntryAttributeBoolean( id_jobentry, "create_destination_folder" );
      nr_errors_less_than = rep.getJobEntryAttributeString( id_jobentry, "nr_errors_less_than" );
      success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" );
      add_date = rep.getJobEntryAttributeBoolean( id_jobentry, "add_date" );
      add_time = rep.getJobEntryAttributeBoolean( id_jobentry, "add_time" );
      SpecifyFormat = rep.getJobEntryAttributeBoolean( id_jobentry, "SpecifyFormat" );
      date_time_format = rep.getJobEntryAttributeString( id_jobentry, "date_time_format" );
      AddDateBeforeExtension = rep.getJobEntryAttributeBoolean( id_jobentry,
        "AddDateBeforeExtension" );
      DoNotKeepFolderStructure = rep.getJobEntryAttributeBoolean( id_jobentry, "DoNotKeepFolderStructure" );
      iffileexists = rep.getJobEntryAttributeString( id_jobentry, "iffileexists" );
      destinationFolder = rep.getJobEntryAttributeString( id_jobentry, "destinationFolder" );
      ifmovedfileexists = rep.getJobEntryAttributeString( id_jobentry, "ifmovedfileexists" );
      moved_date_time_format = rep.getJobEntryAttributeString( id_jobentry, "moved_date_time_format" );
      AddMovedDateBeforeExtension = rep.getJobEntryAttributeBoolean( id_jobentry, "AddMovedDateBeforeExtension" );
      create_move_to_folder = rep.getJobEntryAttributeBoolean( id_jobentry, "create_move_to_folder" );
      add_moved_date = rep.getJobEntryAttributeBoolean( id_jobentry, "add_moved_date" );
      add_moved_time = rep.getJobEntryAttributeBoolean( id_jobentry, "add_moved_time" );
      SpecifyMoveFormat = rep.getJobEntryAttributeBoolean( id_jobentry, "SpecifyMoveFormat" );
      simulate = rep.getJobEntryAttributeBoolean( id_jobentry, "simulate" );

      // How many arguments?
      // The row count is derived from the number of "source_filefolder" attributes;
      // the three arrays are then filled in parallel.
      int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" );
      source_filefolder = new String[argnr];
      destination_filefolder = new String[argnr];
      wildcard = new String[argnr];

      // Read them all...
      for ( int a = 0; a < argnr; a++ ) {
        source_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_filefolder" );
        destination_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "destination_filefolder" );
        wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" );
      }
    } catch ( KettleException dbe ) {
      throw new KettleException( BaseMessages.getString( PKG, "JobMoveFiles.Error.Exception.UnableLoadRep" )
        + id_jobentry, dbe );
    }
  }

  /**
   * Persists this job entry's configuration to the Kettle repository
   * (counterpart of loadRep()). Attribute names mirror the XML tag names.
   *
   * @param id_job id of the parent job in the repository
   * @throws KettleException if the repository write fails
   */
  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
    try {
      rep.saveJobEntryAttribute( id_job, getObjectId(), "move_empty_folders", move_empty_folders );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "arg_from_previous", arg_from_previous );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "include_subfolders", include_subfolders );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "add_result_filesname", add_result_filesname );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "destination_is_a_file", destination_is_a_file );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "create_destination_folder", create_destination_folder );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "nr_errors_less_than", nr_errors_less_than );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "success_condition", success_condition );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "add_date", add_date );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "add_time", add_time );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "SpecifyFormat", SpecifyFormat );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "date_time_format", date_time_format );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "AddDateBeforeExtension", AddDateBeforeExtension );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "DoNotKeepFolderStructure", DoNotKeepFolderStructure );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "iffileexists", iffileexists );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "destinationFolder", destinationFolder );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "ifmovedfileexists", ifmovedfileexists );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "moved_date_time_format", moved_date_time_format );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "add_moved_date", add_moved_date );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "add_moved_time", add_moved_time );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "SpecifyMoveFormat", SpecifyMoveFormat );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "create_move_to_folder", create_move_to_folder );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "AddMovedDateBeforeExtension", AddMovedDateBeforeExtension );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "simulate", simulate );

      // save the arguments...
      if ( source_filefolder != null ) {
        for ( int i = 0; i < source_filefolder.length; i++ ) {
          rep.saveJobEntryAttribute( id_job, getObjectId(), i, "source_filefolder", source_filefolder[i] );
          rep.saveJobEntryAttribute( id_job, getObjectId(), i, "destination_filefolder", destination_filefolder[i] );
          rep.saveJobEntryAttribute( id_job, getObjectId(), i, "wildcard", wildcard[i] );
        }
      }
    } catch ( KettleDatabaseException dbe ) {
      throw new KettleException( BaseMessages.getString( PKG, "JobMoveFiles.Error.Exception.UnableSaveRep" )
        + id_job, dbe );
    }
  }

  /**
   * Executes the "move files" job entry.
   *
   * Rows either come from the previous entry's result rows (arg_from_previous,
   * columns: 0=source, 1=destination, 2=wildcard) or from the statically
   * configured parallel arrays. Errors/successes are tallied into
   * NrErrors/NrSuccess and evaluated against the configured success condition.
   *
   * @param previousResult result of the previous entry; mutated and returned
   * @param nr entry number (unused here)
   * @return the same Result, with nrErrors/nrLinesWritten/result set
   */
  public Result execute( Result previousResult, int nr ) throws KettleException {
    Result result = previousResult;
    List<RowMetaAndData> rows = result.getRows();
    RowMetaAndData resultRow = null;
    // Pessimistic defaults: failure until proven otherwise.
    result.setNrErrors( 1 );
    result.setResult( false );

    NrErrors = 0;
    NrSuccess = 0;
    successConditionBroken = false;
    successConditionBrokenExit = false;
    // Error/success threshold; defaults to 10 when the option doesn't parse.
    limitFiles = Const.toInt( environmentSubstitute( getNrErrorsLessThan() ), 10 );

    if ( log.isDetailed() ) {
      if ( simulate ) {
        logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.SimulationOn" ) );
      }
      if ( include_subfolders ) {
        logDetailed( BaseMessages.getString( PKG,
          "JobMoveFiles.Log.IncludeSubFoldersOn" ) );
      }
    }

    String MoveToFolder = environmentSubstitute( destinationFolder );
    // Get source and destination files, also wildcard
    String[] vsourcefilefolder = source_filefolder;
    String[] vdestinationfilefolder = destination_filefolder;
    String[] vwildcard = wildcard;

    // When the "if file exists" policy is move_file, validate (and optionally
    // create) the move-to folder up front; bail out of the whole entry otherwise.
    if ( iffileexists.equals( "move_file" ) ) {
      if ( Const.isEmpty( MoveToFolder ) ) {
        logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.Error.MoveToFolderMissing" ) );
        return result;
      }
      FileObject folder = null;
      try {
        folder = KettleVFS.getFileObject( MoveToFolder, this );
        if ( !folder.exists() ) {
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.Error.FolderMissing", MoveToFolder ) );
          }
          if ( create_move_to_folder ) {
            folder.createFolder();
          } else {
            logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.Error.FolderMissing", MoveToFolder ) );
            return result;
          }
        }
        if ( !folder.getType().equals( FileType.FOLDER ) ) {
          // The path exists but is not a folder — cannot be used as move target.
          logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.Error.NotFolder", MoveToFolder ) );
          return result;
        }
      } catch ( Exception e ) {
        logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.Error.GettingMoveToFolder", MoveToFolder,
          e.getMessage() ) );
        return result;
      } finally {
        if ( folder != null ) {
          try {
            folder.close();
          } catch ( IOException ex ) { /* Ignore */
          }
        }
      }
    }

    if ( arg_from_previous ) {
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.ArgFromPrevious.Found",
          ( rows != null ? rows.size() : 0 ) + "" ) );
      }
    }
    if ( arg_from_previous && rows != null ) {
      // Dynamic mode: each incoming result row supplies source/destination/wildcard.
      for ( int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++ ) {
        // Success condition broken?
        if ( successConditionBroken ) {
          if ( !successConditionBrokenExit ) {
            logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.SuccessConditionbroken", "" + NrErrors ) );
            successConditionBrokenExit = true;
          }
          result.setNrErrors( NrErrors );
          displayResults();
          return result;
        }

        resultRow = rows.get( iteration );

        // Get source and destination file names, also wildcard
        String vsourcefilefolder_previous = resultRow.getString( 0, null );
        String vdestinationfilefolder_previous = resultRow.getString( 1, null );
        String vwildcard_previous = resultRow.getString( 2, null );

        if ( !Const.isEmpty( vsourcefilefolder_previous ) && !Const.isEmpty( vdestinationfilefolder_previous ) ) {
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.ProcessingRow",
              vsourcefilefolder_previous, vdestinationfilefolder_previous, vwildcard_previous ) );
          }
          if ( !ProcessFileFolder( vsourcefilefolder_previous, vdestinationfilefolder_previous, vwildcard_previous,
            parentJob, result, MoveToFolder ) ) {
            // The move process fail
            // Update Errors
            updateErrors();
          }
        } else {
          if ( log.isDetailed() ) {
            // NOTE(review): logs the static arrays indexed by the row iteration,
            // not the row values that were actually empty — confirm intent.
            logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.IgnoringRow",
              vsourcefilefolder[iteration], vdestinationfilefolder[iteration], vwildcard[iteration] ) );
          }
        }
      }
    } else if ( vsourcefilefolder != null && vdestinationfilefolder != null ) {
      // Static mode: iterate the configured source/destination/wildcard rows.
      for ( int i = 0; i < vsourcefilefolder.length && !parentJob.isStopped(); i++ ) {
        // Success condition broken?
        if ( successConditionBroken ) {
          if ( !successConditionBrokenExit ) {
            logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.SuccessConditionbroken", "" + NrErrors ) );
            successConditionBrokenExit = true;
          }
          result.setNrErrors( NrErrors );
          displayResults();
          return result;
        }

        if ( !Const.isEmpty( vsourcefilefolder[i] ) && !Const.isEmpty( vdestinationfilefolder[i] ) ) {
          // ok we can process this file/folder
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.ProcessingRow",
              vsourcefilefolder[i], vdestinationfilefolder[i], vwildcard[i] ) );
          }
          if ( !ProcessFileFolder( vsourcefilefolder[i], vdestinationfilefolder[i], vwildcard[i], parentJob, result,
            MoveToFolder ) ) {
            // Update Errors
            updateErrors();
          }
        } else {
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.IgnoringRow",
              vsourcefilefolder[i], vdestinationfilefolder[i], vwildcard[i] ) );
          }
        }
      }
    }

    // Success Condition
    result.setNrErrors( NrErrors );
    result.setNrLinesWritten( NrSuccess );
    if ( getSuccessStatus() ) {
      result.setResult( true );
    }

    displayResults();

    return result;
  }

  // Logs a short error/success summary at detailed log level.
  private void displayResults() {
    if ( log.isDetailed() ) {
      logDetailed( "=======================================" );
      logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.Info.FilesInError", "" + NrErrors ) );
      logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.Info.FilesInSuccess", "" + NrSuccess ) );
      logDetailed( "=======================================" );
    }
  }

  // Evaluates the configured success condition against the NrErrors/NrSuccess
  // counters accumulated during execute().
  // NOTE(review): the SUCCESS_IF_AT_LEAST_X_FILES_UN_ZIPPED constant name suggests
  // a copy/paste from the unzip job entry — verify it maps to "at least x files moved".
  private boolean getSuccessStatus() {
    boolean retval = false;
    if ( ( NrErrors == 0 && getSuccessCondition().equals( SUCCESS_IF_NO_ERRORS ) )
      || ( NrSuccess >= limitFiles && getSuccessCondition().equals( SUCCESS_IF_AT_LEAST_X_FILES_UN_ZIPPED ) )
      || ( NrErrors <= limitFiles && getSuccessCondition().equals( SUCCESS_IF_ERRORS_LESS ) ) ) {
      retval = true;
    }
    return retval;
  }

  /**
   * Processes a single source/destination pair: resolves VFS objects, validates
   * source existence and destination folder, then dispatches to MoveFile /
   * MoveOneFile depending on whether source and destination are files or folders.
   *
   * @return true when the pair was processed without a hard failure
   */
  private boolean ProcessFileFolder( String sourcefilefoldername, String destinationfilefoldername, String wildcard,
    Job parentJob, Result
      result, String MoveToFolder ) {
    boolean entrystatus = false;
    FileObject sourcefilefolder = null;
    FileObject destinationfilefolder = null;
    FileObject movetofolderfolder = null;
    FileObject Currentfile = null;

    // Get real source, destination file and wildcard
    // (variable substitution applied to all three).
    String realSourceFilefoldername = environmentSubstitute( sourcefilefoldername );
    String realDestinationFilefoldername = environmentSubstitute( destinationfilefoldername );
    String realWildcard = environmentSubstitute( wildcard );

    try {
      sourcefilefolder = KettleVFS.getFileObject( realSourceFilefoldername, this );
      destinationfilefolder = KettleVFS.getFileObject( realDestinationFilefoldername, this );
      if ( !Const.isEmpty( MoveToFolder ) ) {
        movetofolderfolder = KettleVFS.getFileObject( MoveToFolder, this );
      }

      if ( sourcefilefolder.exists() ) {
        // Check if destination folder/parent folder exists !
        // If user wanted and if destination folder does not exist
        // PDI will create it
        if ( CreateDestinationFolder( destinationfilefolder ) ) {
          // Basic Tests
          if ( sourcefilefolder.getType().equals( FileType.FOLDER ) && destination_is_a_file ) {
            // Source is a folder, destination is a file
            // WARNING !!! CAN NOT MOVE FOLDER TO FILE !!!
            log.logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.Forbidden" ), BaseMessages.getString(
              PKG, "JobMoveFiles.Log.CanNotMoveFolderToFile", realSourceFilefoldername,
              realDestinationFilefoldername ) );
            // Update Errors
            updateErrors();
          } else {
            if ( destinationfilefolder.getType().equals( FileType.FOLDER )
              && sourcefilefolder.getType().equals( FileType.FILE ) ) {
              // Source is a file, destination is a folder
              // return destination short filename
              String shortfilename = sourcefilefolder.getName().getBaseName();
              try {
                shortfilename = getDestinationFilename( shortfilename );
              } catch ( Exception e ) {
                // NOTE(review): BaseMessages.getString is nested inside itself here,
                // so the already-resolved message is used as a message key — likely a bug.
                logError( BaseMessages.getString( PKG, BaseMessages.getString( PKG,
                  "JobMoveFiles.Error.GettingFilename", sourcefilefolder.getName().getBaseName(), e.toString() ) ) );
                return entrystatus;
              }
              // Move the file to the destination folder
              String destinationfilenamefull =
                KettleVFS.getFilename( destinationfilefolder ) + Const.FILE_SEPARATOR + shortfilename;
              FileObject destinationfile = KettleVFS.getFileObject( destinationfilenamefull, this );

              entrystatus = MoveFile( shortfilename, sourcefilefolder, destinationfile, movetofolderfolder,
                parentJob, result );
              return entrystatus;
            } else if ( sourcefilefolder.getType().equals( FileType.FILE ) && destination_is_a_file ) {
              // Source is a file, destination is a file
              FileObject destinationfile = KettleVFS.getFileObject( realDestinationFilefoldername, this );

              // return destination short filename
              String shortfilename = destinationfile.getName().getBaseName();
              try {
                shortfilename = getDestinationFilename( shortfilename );
              } catch ( Exception e ) {
                // NOTE(review): same nested-getString pattern as above — likely a bug.
                logError( BaseMessages.getString( PKG, BaseMessages.getString( PKG,
                  "JobMoveFiles.Error.GettingFilename", sourcefilefolder.getName().getBaseName(), e.toString() ) ) );
                return entrystatus;
              }

              String destinationfilenamefull =
                KettleVFS.getFilename( destinationfile.getParent() ) + Const.FILE_SEPARATOR + shortfilename;
              destinationfile = KettleVFS.getFileObject( destinationfilenamefull, this );

              entrystatus = MoveFile( shortfilename, sourcefilefolder, destinationfile, movetofolderfolder,
                parentJob, result );
              return entrystatus;
            } else {
              // Both source and destination are folders
              if ( log.isDetailed() ) {
                logDetailed( " " );
                logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FetchFolder",
                  sourcefilefolder.toString() ) );
              }

              FileObject[] fileObjects = sourcefilefolder.findFiles( new AllFileSelector() {
                public boolean traverseDescendents( FileSelectInfo info ) {
                  return true;
                }

                public boolean includeFile( FileSelectInfo info ) {
                  FileObject fileObject = info.getFile();
                  try {
                    if ( fileObject == null ) {
                      return false;
                    }
                  } catch ( Exception ex ) {
                    // Upon error don't process the file.
                    return false;
                  } finally {
                    // NOTE(review): this closes every candidate FileObject during
                    // selection, before it is later moved — confirm this is safe
                    // for the VFS providers in use.
                    if ( fileObject != null ) {
                      try {
                        fileObject.close();
                      } catch ( IOException ex ) { /* Ignore */
                      }
                    }
                  }
                  return true;
                }
              } );

              if ( fileObjects != null ) {
                for ( int j = 0; j < fileObjects.length && !parentJob.isStopped(); j++ ) {
                  // Success condition broken?
                  if ( successConditionBroken ) {
                    if ( !successConditionBrokenExit ) {
                      logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.SuccessConditionbroken",
                        "" + NrErrors ) );
                      successConditionBrokenExit = true;
                    }
                    return false;
                  }
                  // Fetch files in list one after one ...
                  Currentfile = fileObjects[j];
                  if ( !MoveOneFile( Currentfile, sourcefilefolder, realDestinationFilefoldername, realWildcard,
                    parentJob, result, movetofolderfolder ) ) {
                    // Update Errors
                    updateErrors();
                  }
                }
              }
            }
          }
          entrystatus = true;
        } else {
          // Destination Folder or Parent folder is missing
          logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.DestinationFolderNotFound",
            realDestinationFilefoldername ) );
        }
      } else {
        logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.SourceFileNotExists", realSourceFilefoldername ) );
      }
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.Exception.MoveProcess",
        realSourceFilefoldername.toString(), destinationfilefolder.toString(), e.getMessage() ) );
    } finally {
      // Release every VFS handle opened above; close failures are deliberately ignored.
      if ( sourcefilefolder != null ) {
        try {
          sourcefilefolder.close();
        } catch ( IOException ex ) { /* Ignore */
        }
      }
      if ( destinationfilefolder != null ) {
        try {
          destinationfilefolder.close();
        } catch ( IOException ex ) { /* Ignore */
        }
      }
      if ( Currentfile != null ) {
        try {
          Currentfile.close();
        } catch ( IOException ex ) { /* Ignore */
        }
      }
      if ( movetofolderfolder != null ) {
        try {
          movetofolderfolder.close();
        } catch ( IOException ex ) { /* Ignore */
        }
      }
    }
    return entrystatus;
  }

  /**
   * Moves one file to its destination, applying the configured "if file exists"
   * policy: overwrite_file, unique_name (timestamp suffix), delete_file,
   * move_file (to the move-to folder, with its own ifmovedfileexists policy),
   * fail, or do_nothing. In simulate mode no filesystem change is made but
   * logging/counters behave as if the move happened.
   *
   * @return true when the file was handled successfully under the policy
   */
  private boolean MoveFile( String shortfilename, FileObject sourcefilename, FileObject destinationfilename,
    FileObject movetofolderfolder, Job parentJob, Result result ) {

    FileObject destinationfile = null;
    boolean retval = false;
    try {
      if ( !destinationfilename.exists() ) {
        // Simple case: destination does not exist yet.
        if ( !simulate ) {
          sourcefilename.moveTo( destinationfilename );
        }
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileMoved",
            sourcefilename.getName().toString(), destinationfilename.getName().toString() ) );
        }

        // add filename to result filename
        if ( add_result_filesname && !iffileexists.equals( "fail" ) && !iffileexists.equals( "do_nothing" ) ) {
          addFileToResultFilenames( destinationfilename.toString(), result, parentJob );
        }

        updateSuccess();
        retval =
          true;
      } else {
        // Destination already exists: apply the configured iffileexists policy.
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileExists",
            destinationfilename.toString() ) );
        }
        if ( iffileexists.equals( "overwrite_file" ) ) {
          if ( !simulate ) {
            sourcefilename.moveTo( destinationfilename );
          }
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileOverwrite",
              destinationfilename.getName().toString() ) );
          }

          // add filename to result filename
          if ( add_result_filesname && !iffileexists.equals( "fail" ) && !iffileexists.equals( "do_nothing" ) ) {
            addFileToResultFilenames( destinationfilename.toString(), result, parentJob );
          }

          updateSuccess();
          retval = true;
        } else if ( iffileexists.equals( "unique_name" ) ) {
          String short_filename = shortfilename;

          // return destination short filename
          // (a timestamp is appended to avoid the name collision)
          try {
            short_filename = getMoveDestinationFilename( short_filename, "ddMMyyyy_HHmmssSSS" );
          } catch ( Exception e ) {
            // NOTE(review): nested BaseMessages.getString — the resolved message is
            // reused as a key; likely a bug, left unchanged here.
            logError( BaseMessages.getString( PKG, BaseMessages.getString( PKG,
              "JobMoveFiles.Error.GettingFilename", short_filename ) ), e );
            return retval;
          }

          String movetofilenamefull =
            destinationfilename.getParent().toString() + Const.FILE_SEPARATOR + short_filename;
          destinationfile = KettleVFS.getFileObject( movetofilenamefull, this );

          if ( !simulate ) {
            sourcefilename.moveTo( destinationfile );
          }
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileMoved",
              sourcefilename.getName().toString(), destinationfile.getName().toString() ) );
          }

          // add filename to result filename
          if ( add_result_filesname && !iffileexists.equals( "fail" ) && !iffileexists.equals( "do_nothing" ) ) {
            addFileToResultFilenames( destinationfile.toString(), result, parentJob );
          }

          updateSuccess();
          retval = true;
        } else if ( iffileexists.equals( "delete_file" ) ) {
          // Policy: drop the source file instead of moving it.
          if ( !simulate ) {
            sourcefilename.delete();
          }
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileDeleted",
              destinationfilename.getName().toString() ) );
          }
          updateSuccess();
          retval = true;
        } else if ( iffileexists.equals( "move_file" ) ) {
          // Policy: divert the source into the configured move-to folder.
          String short_filename = shortfilename;

          // return destination short filename
          try {
            short_filename = getMoveDestinationFilename( short_filename, null );
          } catch ( Exception e ) {
            // NOTE(review): nested BaseMessages.getString — see above.
            logError( BaseMessages.getString( PKG, BaseMessages.getString( PKG,
              "JobMoveFiles.Error.GettingFilename", short_filename ) ), e );
            return retval;
          }

          String movetofilenamefull = movetofolderfolder.toString() + Const.FILE_SEPARATOR + short_filename;
          destinationfile = KettleVFS.getFileObject( movetofilenamefull, this );

          if ( !destinationfile.exists() ) {
            if ( !simulate ) {
              sourcefilename.moveTo( destinationfile );
            }
            if ( log.isDetailed() ) {
              logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileMoved",
                sourcefilename.getName().toString(), destinationfile.getName().toString() ) );
            }

            // add filename to result filename
            if ( add_result_filesname && !iffileexists.equals( "fail" ) && !iffileexists.equals( "do_nothing" ) ) {
              addFileToResultFilenames( destinationfile.toString(), result, parentJob );
            }
          } else {
            // The move-to target exists too: apply the ifmovedfileexists sub-policy.
            if ( ifmovedfileexists.equals( "overwrite_file" ) ) {
              if ( !simulate ) {
                sourcefilename.moveTo( destinationfile );
              }
              if ( log.isDetailed() ) {
                logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileOverwrite",
                  destinationfile.getName().toString() ) );
              }

              // add filename to result filename
              if ( add_result_filesname && !iffileexists.equals( "fail" )
                && !iffileexists.equals( "do_nothing" ) ) {
                addFileToResultFilenames( destinationfile.toString(), result, parentJob );
              }

              updateSuccess();
              retval = true;
            } else if ( ifmovedfileexists.equals( "unique_name" ) ) {
              // Append a millisecond-precision timestamp to make the name unique.
              SimpleDateFormat daf = new SimpleDateFormat();
              Date now = new Date();
              daf.applyPattern( "ddMMyyyy_HHmmssSSS" );
              String dt = daf.format( now );
              short_filename += "_" + dt;

              String destinationfilenamefull =
                movetofolderfolder.toString() + Const.FILE_SEPARATOR + short_filename;
              destinationfile = KettleVFS.getFileObject( destinationfilenamefull, this );

              if ( !simulate ) {
                sourcefilename.moveTo( destinationfile );
              }
              if ( log.isDetailed() ) {
                logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileMoved",
                  destinationfile.getName().toString() ) );
              }

              // add filename to result filename
              if ( add_result_filesname && !iffileexists.equals( "fail" )
                && !iffileexists.equals( "do_nothing" ) ) {
                addFileToResultFilenames( destinationfile.toString(), result, parentJob );
              }

              updateSuccess();
              retval = true;
            } else if ( ifmovedfileexists.equals( "fail" ) ) {
              // Update Errors
              updateErrors();
            }
          }
        } else if ( iffileexists.equals( "fail" ) ) {
          // Update Errors
          updateErrors();
        }
      }
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.Exception.MoveProcessError",
        sourcefilename.toString(), destinationfilename.toString(), e.getMessage() ) );
      updateErrors();
    } finally {
      if ( destinationfile != null ) {
        try {
          destinationfile.close();
        } catch ( IOException ex ) { /* Ignore */
        }
      }
    }
    return retval;
  }

  /**
   * Moves a single file found while scanning a source folder, rebuilding the
   * relative path under the destination folder (unless DoNotKeepFolderStructure)
   * and applying the wildcard filter. The base folder itself is skipped.
   *
   * @return true when the file was handled (or legitimately skipped)
   */
  private boolean MoveOneFile( FileObject Currentfile, FileObject sourcefilefolder,
    String realDestinationFilefoldername, String realWildcard, Job parentJob, Result result,
    FileObject movetofolderfolder ) {
    boolean entrystatus = false;
    FileObject file_name = null;

    try {
      if ( !Currentfile.toString().equals( sourcefilefolder.toString() ) ) {
        // Pass over the Base folder itself

        // return destination short filename
        String sourceshortfilename = Currentfile.getName().getBaseName();
        String shortfilename = sourceshortfilename;
        try {
          shortfilename = getDestinationFilename( sourceshortfilename );
        } catch ( Exception e ) {
          // NOTE(review): nested BaseMessages.getString — see MoveFile.
          logError( BaseMessages.getString( PKG, BaseMessages.getString( PKG,
            "JobMoveFiles.Error.GettingFilename", Currentfile.getName().getBaseName(), e.toString() ) ) );
          return entrystatus;
        }

        int lenCurrent = sourceshortfilename.length();
        String short_filename_from_basefolder = shortfilename;
        if ( !isDoNotKeepFolderStructure() ) {
          // Keep the path relative to the base folder.
          short_filename_from_basefolder = Currentfile.toString().substring(
            sourcefilefolder.toString().length(), Currentfile.toString().length() );
        }
        // Swap the original base name for the (possibly date/time-decorated) one.
        short_filename_from_basefolder =
          short_filename_from_basefolder.substring( 0, short_filename_from_basefolder.length() - lenCurrent )
            + shortfilename;

        // Built destination filename
        file_name = KettleVFS.getFileObject( realDestinationFilefoldername + Const.FILE_SEPARATOR
          + short_filename_from_basefolder, this );

        if ( !Currentfile.getParent().toString().equals( sourcefilefolder.toString() ) ) {

          // Not in the Base Folder..Only if include sub folders
          if ( include_subfolders ) {
            // Folders..only if include subfolders
            if ( Currentfile.getType() == FileType.FOLDER ) {
              // NOTE(review): the empty-ness test uses the `wildcard` field, not the
              // realWildcard parameter passed in — confirm which one is intended.
              if ( include_subfolders && move_empty_folders && Const.isEmpty( wildcard ) ) {
                entrystatus =
                  MoveFile( shortfilename, Currentfile, file_name, movetofolderfolder, parentJob, result );
              }
            } else {
              if ( GetFileWildcard( sourceshortfilename, realWildcard ) ) {
                entrystatus =
                  MoveFile( shortfilename, Currentfile, file_name, movetofolderfolder, parentJob, result );
              }
            }
          }
        } else {
          // In the Base Folder...
          // Folders..only if include subfolders
          if ( Currentfile.getType() == FileType.FOLDER ) {
            if ( include_subfolders && move_empty_folders && Const.isEmpty( wildcard ) ) {
              entrystatus =
                MoveFile( shortfilename, Currentfile, file_name, movetofolderfolder, parentJob, result );
            }
          } else {
            // file...Check if exists
            if ( GetFileWildcard( sourceshortfilename, realWildcard ) ) {
              entrystatus =
                MoveFile( shortfilename, Currentfile, file_name, movetofolderfolder, parentJob, result );
            }
          }
        }
      }
      entrystatus = true;
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.Error", e.toString() ) );
    } finally {
      if ( file_name != null ) {
        try {
          file_name.close();
        } catch ( IOException ex ) { /* Ignore */
        }
      }
    }
    return entrystatus;
  }

  // Bumps the error counter and latches successConditionBroken once the
  // configured threshold is exceeded.
  private void updateErrors() {
    NrErrors++;
    if ( checkIfSuccessConditionBroken() ) {
      // Success condition was broken
      successConditionBroken = true;
    }
  }

  // True when the current NrErrors count violates the configured success condition.
  private boolean checkIfSuccessConditionBroken() {
    boolean retval = false;
    if ( ( NrErrors > 0 && getSuccessCondition().equals( SUCCESS_IF_NO_ERRORS ) )
      || ( NrErrors >= limitFiles && getSuccessCondition().equals( SUCCESS_IF_ERRORS_LESS ) ) ) {
      retval = true;
    }
    return retval;
  }

  // Bumps the success counter (reported as nrLinesWritten in execute()).
  private void updateSuccess() {
    NrSuccess++;
  }

  // Registers a moved file in the job Result so downstream entries can see it.
  private void addFileToResultFilenames( String fileaddentry, Result result, Job parentJob ) {
    try {
      ResultFile resultFile =
        new ResultFile( ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject( fileaddentry, this ),
          parentJob.getJobname(), toString() );
      result.getResultFiles().put( resultFile.getFile().toString(), resultFile );
      if ( log.isDebug() ) {
        logDebug( " ------ " );
        logDebug( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileAddedToResultFilesName", fileaddentry ) );
      }
    } catch ( Exception e ) {
      // NOTE(review): `fileaddentry + "" + e.getMessage()` concatenates with an empty
      // string — a separator was probably intended; left unchanged (runtime string).
      log.logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.AddingToFilenameResult" ), fileaddentry
        + "" + e.getMessage() );
    }
  }

  // Ensures the destination folder (or the parent folder, when the destination is
  // a file) exists, creating it when configured to. Continues past this chunk.
  private boolean CreateDestinationFolder( FileObject filefolder ) {
    FileObject folder = null;
    try {
      if ( destination_is_a_file ) {
        folder
// --- continuation of CreateDestinationFolder( FileObject filefolder ): destination is a
// file, so the folder to ensure is its parent. ---
= filefolder.getParent();
} else {
  folder = filefolder;
}
if ( !folder.exists() ) {
  if ( create_destination_folder ) {
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FolderNotExist", folder
        .getName().toString() ) );
    }
    folder.createFolder();
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FolderWasCreated", folder
        .getName().toString() ) );
    }
  } else {
    // Folder is missing and we may not create it: hard failure.
    logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.FolderNotExist", folder.getName().toString() ) );
    return false;
  }
}
return true;
} catch ( Exception e ) {
// NOTE(review): if the exception was thrown before 'folder' was assigned (e.g. by
// filefolder.getParent()), 'folder' is still null here and folder.getName() would NPE — confirm.
logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.CanNotCreateParentFolder", folder
  .getName().toString() ), e );
} finally {
if ( folder != null ) {
  try {
    folder.close();
  } catch ( Exception ex ) { /* Ignore */ }
}
}
return false;
}

/**
 * Tests a short file name against the configured wildcard.
 *
 * @param selectedfile short file name to test
 * @param wildcard regular expression (java.util.regex); empty/null matches everything
 * @return true if the selectedfile matches the wildcard (or no wildcard is set)
 */
private boolean GetFileWildcard( String selectedfile, String wildcard ) {
Pattern pattern = null;
boolean getIt = true;
if ( !Const.isEmpty( wildcard ) ) {
  pattern = Pattern.compile( wildcard );
  // First see if the file matches the regular expression!
  if ( pattern != null ) {
    Matcher matcher = pattern.matcher( selectedfile );
    getIt = matcher.matches();
  }
}
return getIt;
}

/**
 * Builds the destination short file name from the source short name, appending the
 * configured date/time decoration (custom mask, or yyyyMMdd / HHmmssSSS parts), optionally
 * inserting it before the file extension.
 *
 * @param shortsourcefilename source base name
 * @return decorated destination base name
 * @throws Exception on formatting problems
 */
private String getDestinationFilename( String shortsourcefilename ) throws Exception {
String shortfilename = shortsourcefilename;
int lenstring = shortsourcefilename.length();
// Position of the last '.'; when there is no extension this is normalized to the
// end of the string on the following line of the file.
int lastindexOfDot = shortfilename.lastIndexOf( '.'
); if ( lastindexOfDot == -1 ) { lastindexOfDot = lenstring; } if ( isAddMovedDateBeforeExtension() ) { shortfilename = shortfilename.substring( 0, lastindexOfDot ); } SimpleDateFormat daf = new SimpleDateFormat(); Date now = new Date(); if ( DateFormat != null ) { daf.applyPattern( DateFormat ); String dt = daf.format( now ); shortfilename += dt; } else { if ( isSpecifyMoveFormat() && !Const.isEmpty( getMovedDateTimeFormat() ) ) { daf.applyPattern( getMovedDateTimeFormat() ); String dt = daf.format( now ); shortfilename += dt; } else { if ( isAddMovedDate() ) { daf.applyPattern( "yyyyMMdd" ); String d = daf.format( now ); shortfilename += "_" + d; } if ( isAddMovedTime() ) { daf.applyPattern( "HHmmssSSS" ); String t = daf.format( now ); shortfilename += "_" + t; } } } if ( isAddMovedDateBeforeExtension() ) { shortfilename += shortsourcefilename.substring( lastindexOfDot, lenstring ); } return shortfilename; } public void setAddDate( boolean adddate ) { this.add_date = adddate; } public boolean isAddDate() { return add_date; } public boolean isAddMovedDate() { return add_moved_date; } public void setAddMovedDate( boolean add_moved_date ) { this.add_moved_date = add_moved_date; } public boolean isAddMovedTime() { return add_moved_time; } public void setAddMovedTime( boolean add_moved_time ) { this.add_moved_time = add_moved_time; } public void setIfFileExists( String iffileexists ) { this.iffileexists = iffileexists; } public String getIfFileExists() { return iffileexists; } public void setIfMovedFileExists( String ifmovedfileexists ) { this.ifmovedfileexists = ifmovedfileexists; } public String getIfMovedFileExists() { return ifmovedfileexists; } public void setAddTime( boolean addtime ) { this.add_time = addtime; } public boolean isAddTime() { return add_time; } public void setAddDateBeforeExtension( boolean AddDateBeforeExtension ) { this.AddDateBeforeExtension = AddDateBeforeExtension; } public void setAddMovedDateBeforeExtension( boolean 
AddMovedDateBeforeExtension ) { this.AddMovedDateBeforeExtension = AddMovedDateBeforeExtension; } public boolean isSpecifyFormat() { return SpecifyFormat; } public void setSpecifyFormat( boolean SpecifyFormat ) { this.SpecifyFormat = SpecifyFormat; } public void setSpecifyMoveFormat( boolean SpecifyMoveFormat ) { this.SpecifyMoveFormat = SpecifyMoveFormat; } public boolean isSpecifyMoveFormat() { return SpecifyMoveFormat; } public String getDateTimeFormat() { return date_time_format; } public void setDateTimeFormat( String date_time_format ) { this.date_time_format = date_time_format; } public String getMovedDateTimeFormat() { return moved_date_time_format; } public void setMovedDateTimeFormat( String moved_date_time_format ) { this.moved_date_time_format = moved_date_time_format; } public boolean isAddDateBeforeExtension() { return AddDateBeforeExtension; } public boolean isAddMovedDateBeforeExtension() { return AddMovedDateBeforeExtension; } public boolean isDoNotKeepFolderStructure() { return DoNotKeepFolderStructure; } public void setDestinationFolder( String destinationFolder ) { this.destinationFolder = destinationFolder; } public String getDestinationFolder() { return destinationFolder; } public void setDoNotKeepFolderStructure( boolean DoNotKeepFolderStructure ) { this.DoNotKeepFolderStructure = DoNotKeepFolderStructure; } public void setMoveEmptyFolders( boolean move_empty_foldersin ) { this.move_empty_folders = move_empty_foldersin; } public void setIncludeSubfolders( boolean include_subfoldersin ) { this.include_subfolders = include_subfoldersin; } public void setAddresultfilesname( boolean add_result_filesnamein ) { this.add_result_filesname = add_result_filesnamein; } public void setArgFromPrevious( boolean argfrompreviousin ) { this.arg_from_previous = argfrompreviousin; } public void setDestinationIsAFile( boolean destination_is_a_file ) { this.destination_is_a_file = destination_is_a_file; } public void setCreateDestinationFolder( boolean 
create_destination_folder ) { this.create_destination_folder = create_destination_folder; } public void setCreateMoveToFolder( boolean create_move_to_folder ) { this.create_move_to_folder = create_move_to_folder; } public void setNrErrorsLessThan( String nr_errors_less_than ) { this.nr_errors_less_than = nr_errors_less_than; } public String getNrErrorsLessThan() { return nr_errors_less_than; } public void setSimulate( boolean simulate ) { this.simulate = simulate; } public void setSuccessCondition( String success_condition ) { this.success_condition = success_condition; } public String getSuccessCondition() { return success_condition; } public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space, Repository repository, IMetaStore metaStore ) { boolean res = andValidator().validate( this, "arguments", remarks, putValidators( notNullValidator() ) ); if ( res == false ) { return; } ValidatorContext ctx = new ValidatorContext(); putVariableSpace( ctx, getVariables() ); putValidators( ctx, notNullValidator(), fileExistsValidator() ); for ( int i = 0; i < source_filefolder.length; i++ ) { andValidator().validate( this, "arguments[" + i + "]", remarks, ctx ); } } public boolean evaluates() { return true; } }
engine/src/org/pentaho/di/job/entries/movefiles/JobEntryMoveFiles.java
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.job.entries.movefiles; import static org.pentaho.di.job.entry.validator.AbstractFileValidator.putVariableSpace; import static org.pentaho.di.job.entry.validator.AndValidator.putValidators; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.andValidator; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.fileExistsValidator; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.notNullValidator; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.vfs.AllFileSelector; import org.apache.commons.vfs.FileObject; import org.apache.commons.vfs.FileSelectInfo; import org.apache.commons.vfs.FileType; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.Result; import org.pentaho.di.core.ResultFile; import org.pentaho.di.core.RowMetaAndData; import 
org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.Job; import org.pentaho.di.job.JobMeta; import org.pentaho.di.job.entry.JobEntryBase; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.job.entry.validator.ValidatorContext; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; /** * This defines a 'move files' job entry. * * @author Samatar Hassan * @since 25-02-2008 */ public class JobEntryMoveFiles extends JobEntryBase implements Cloneable, JobEntryInterface { private static Class<?> PKG = JobEntryMoveFiles.class; // for i18n purposes, needed by Translator2!! 
// --- Configuration fields. Naming mixes snake_case and UpperCamelCase; kept as-is because
// the names are mirrored in the XML/repository persistence below and in the dialog class. ---
public boolean move_empty_folders;
public boolean arg_from_previous;
public boolean include_subfolders;
public boolean add_result_filesname;
public boolean destination_is_a_file;
public boolean create_destination_folder;
public String[] source_filefolder;
public String[] destination_filefolder;
public String[] wildcard;
private String nr_errors_less_than;
private String success_condition;
// Success-condition identifiers as stored in job XML / the repository.
public String SUCCESS_IF_AT_LEAST_X_FILES_UN_ZIPPED = "success_when_at_least";
public String SUCCESS_IF_ERRORS_LESS = "success_if_errors_less";
public String SUCCESS_IF_NO_ERRORS = "success_if_no_errors";
private boolean add_date;
private boolean add_time;
private boolean SpecifyFormat;
private String date_time_format;
private boolean AddDateBeforeExtension;
private boolean DoNotKeepFolderStructure;
private String iffileexists;
private String destinationFolder;
private String ifmovedfileexists;
private String moved_date_time_format;
private boolean AddMovedDateBeforeExtension;
private boolean add_moved_date;
private boolean add_moved_time;
private boolean SpecifyMoveFormat;
public boolean create_move_to_folder;
public boolean simulate;
// Per-execution counters/state, reset at the start of execute().
int NrErrors = 0;
int NrSuccess = 0;
boolean successConditionBroken = false;
boolean successConditionBrokenExit = false;
int limitFiles = 0;

/**
 * Creates a named entry with all options at their defaults
 * ("do nothing" on collisions, move empty folders, error limit 10, success if no errors).
 *
 * @param n the entry name
 */
public JobEntryMoveFiles( String n ) {
super( n, "" );
simulate = false;
create_move_to_folder = false;
SpecifyMoveFormat = false;
add_moved_date = false;
add_moved_time = false;
AddMovedDateBeforeExtension = false;
moved_date_time_format = null;
ifmovedfileexists = "do_nothing";
destinationFolder = null;
DoNotKeepFolderStructure = false;
move_empty_folders = true;
arg_from_previous = false;
source_filefolder = null;
destination_filefolder = null;
wildcard = null;
include_subfolders = false;
add_result_filesname = false;
destination_is_a_file = false;
create_destination_folder = false;
nr_errors_less_than = "10";
success_condition = SUCCESS_IF_NO_ERRORS;
add_date = false;
add_time = false;
SpecifyFormat = false;
date_time_format = null;
AddDateBeforeExtension = false;
iffileexists = "do_nothing";
}

/** No-arg constructor required for instantiation by the plugin framework. */
public JobEntryMoveFiles() {
this( "" );
}

public Object clone() {
// Shallow copy via the base class is sufficient here; note the String[] fields are shared.
JobEntryMoveFiles je = (JobEntryMoveFiles) super.clone();
return je;
}

/** Serializes this entry's settings (including the per-row field arrays) to job XML. */
public String getXML() {
StringBuffer retval = new StringBuffer( 300 );

retval.append( super.getXML() );
retval.append( " " ).append( XMLHandler.addTagValue( "move_empty_folders", move_empty_folders ) );
retval.append( " " ).append( XMLHandler.addTagValue( "arg_from_previous", arg_from_previous ) );
retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", include_subfolders ) );
retval.append( " " ).append( XMLHandler.addTagValue( "add_result_filesname", add_result_filesname ) );
retval.append( " " ).append( XMLHandler.addTagValue( "destination_is_a_file", destination_is_a_file ) );
retval.append( " " ).append( XMLHandler.addTagValue( "create_destination_folder", create_destination_folder ) );
retval.append( " " ).append( XMLHandler.addTagValue( "add_date", add_date ) );
retval.append( " " ).append( XMLHandler.addTagValue( "add_time", add_time ) );
retval.append( " " ).append( XMLHandler.addTagValue( "SpecifyFormat", SpecifyFormat ) );
retval.append( " " ).append( XMLHandler.addTagValue( "date_time_format", date_time_format ) );
retval.append( " " ).append( XMLHandler.addTagValue( "nr_errors_less_than", nr_errors_less_than ) );
retval.append( " " ).append( XMLHandler.addTagValue( "success_condition", success_condition ) );
retval.append( " " ).append( XMLHandler.addTagValue( "AddDateBeforeExtension", AddDateBeforeExtension ) );
retval.append( " " ).append( XMLHandler.addTagValue( "DoNotKeepFolderStructure", DoNotKeepFolderStructure ) );
retval.append( " " ).append( XMLHandler.addTagValue( "iffileexists", iffileexists ) );
retval.append( " " ).append( XMLHandler.addTagValue( "destinationFolder", destinationFolder ) );
retval.append( " " ).append( XMLHandler.addTagValue( "ifmovedfileexists", ifmovedfileexists ) );
retval.append( " " ).append( XMLHandler.addTagValue( "moved_date_time_format", moved_date_time_format ) );
retval.append( " " ).append( XMLHandler.addTagValue( "create_move_to_folder", create_move_to_folder ) );
retval.append( " " ).append( XMLHandler.addTagValue( "add_moved_date", add_moved_date ) );
retval.append( " " ).append( XMLHandler.addTagValue( "add_moved_time", add_moved_time ) );
retval.append( " " ).append( XMLHandler.addTagValue( "SpecifyMoveFormat", SpecifyMoveFormat ) );
retval.append( " " ).append( XMLHandler.addTagValue( "AddMovedDateBeforeExtension", AddMovedDateBeforeExtension ) );
retval.append( " " ).append( XMLHandler.addTagValue( "simulate", simulate ) );

// One <field> element per source/destination/wildcard row.
retval.append( " <fields>" ).append( Const.CR );
if ( source_filefolder != null ) {
  for ( int i = 0; i < source_filefolder.length; i++ ) {
    retval.append( " <field>" ).append( Const.CR );
    retval.append( " " ).append( XMLHandler.addTagValue( "source_filefolder", source_filefolder[i] ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "destination_filefolder", destination_filefolder[i] ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "wildcard", wildcard[i] ) );
    retval.append( " </field>" ).append( Const.CR );
  }
}
retval.append( " </fields>" ).append( Const.CR );

return retval.toString();
}

/** Restores this entry's settings from job XML (the inverse of {@link #getXML()}). */
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
try {
  super.loadXML( entrynode, databases, slaveServers );
  move_empty_folders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "move_empty_folders" ) );
  arg_from_previous = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "arg_from_previous" ) );
  include_subfolders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "include_subfolders" ) );
  add_result_filesname = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_result_filesname" ) );
  destination_is_a_file = "Y".equalsIgnoreCase(
// --- continuation of loadXML( ... ): remaining scalar options, then the <fields> rows. ---
XMLHandler.getTagValue( entrynode, "destination_is_a_file" ) );
create_destination_folder = "Y".equalsIgnoreCase(
  XMLHandler.getTagValue( entrynode, "create_destination_folder" ) );
add_date = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_date" ) );
add_time = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_time" ) );
SpecifyFormat = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "SpecifyFormat" ) );
AddDateBeforeExtension = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "AddDateBeforeExtension" ) );
DoNotKeepFolderStructure =
  "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "DoNotKeepFolderStructure" ) );
date_time_format = XMLHandler.getTagValue( entrynode, "date_time_format" );
nr_errors_less_than = XMLHandler.getTagValue( entrynode, "nr_errors_less_than" );
success_condition = XMLHandler.getTagValue( entrynode, "success_condition" );
iffileexists = XMLHandler.getTagValue( entrynode, "iffileexists" );
destinationFolder = XMLHandler.getTagValue( entrynode, "destinationFolder" );
ifmovedfileexists = XMLHandler.getTagValue( entrynode, "ifmovedfileexists" );
moved_date_time_format = XMLHandler.getTagValue( entrynode, "moved_date_time_format" );
AddMovedDateBeforeExtension =
  "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "AddMovedDateBeforeExtension" ) );
create_move_to_folder = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "create_move_to_folder" ) );
add_moved_date = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_moved_date" ) );
add_moved_time = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_moved_time" ) );
SpecifyMoveFormat = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "SpecifyMoveFormat" ) );
simulate = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "simulate" ) );

Node fields = XMLHandler.getSubNode( entrynode, "fields" );

// How many field arguments?
int nrFields = XMLHandler.countNodes( fields, "field" );
source_filefolder = new String[nrFields];
destination_filefolder = new String[nrFields];
wildcard = new String[nrFields];

// Read them all...
for ( int i = 0; i < nrFields; i++ ) {
  Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
  source_filefolder[i] = XMLHandler.getTagValue( fnode, "source_filefolder" );
  destination_filefolder[i] = XMLHandler.getTagValue( fnode, "destination_filefolder" );
  wildcard[i] = XMLHandler.getTagValue( fnode, "wildcard" );
}
} catch ( KettleXMLException xe ) {
throw new KettleXMLException( BaseMessages.getString( PKG, "JobMoveFiles.Error.Exception.UnableLoadXML" ), xe );
}
}

/** Restores this entry's settings from a Kettle repository (the counterpart of saveRep). */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry,
  List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException {
try {
  move_empty_folders = rep.getJobEntryAttributeBoolean( id_jobentry, "move_empty_folders" );
  arg_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
  include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );
  add_result_filesname = rep.getJobEntryAttributeBoolean( id_jobentry, "add_result_filesname" );
  destination_is_a_file = rep.getJobEntryAttributeBoolean( id_jobentry, "destination_is_a_file" );
  create_destination_folder = rep.getJobEntryAttributeBoolean( id_jobentry, "create_destination_folder" );
  nr_errors_less_than = rep.getJobEntryAttributeString( id_jobentry, "nr_errors_less_than" );
  success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" );
  add_date = rep.getJobEntryAttributeBoolean( id_jobentry, "add_date" );
  add_time = rep.getJobEntryAttributeBoolean( id_jobentry, "add_time" );
  SpecifyFormat = rep.getJobEntryAttributeBoolean( id_jobentry, "SpecifyFormat" );
  date_time_format = rep.getJobEntryAttributeString( id_jobentry, "date_time_format" );
  AddDateBeforeExtension = rep.getJobEntryAttributeBoolean( id_jobentry,
"AddDateBeforeExtension" ); DoNotKeepFolderStructure = rep.getJobEntryAttributeBoolean( id_jobentry, "DoNotKeepFolderStructure" ); iffileexists = rep.getJobEntryAttributeString( id_jobentry, "iffileexists" ); destinationFolder = rep.getJobEntryAttributeString( id_jobentry, "destinationFolder" ); ifmovedfileexists = rep.getJobEntryAttributeString( id_jobentry, "ifmovedfileexists" ); moved_date_time_format = rep.getJobEntryAttributeString( id_jobentry, "moved_date_time_format" ); AddMovedDateBeforeExtension = rep.getJobEntryAttributeBoolean( id_jobentry, "AddMovedDateBeforeExtension" ); create_move_to_folder = rep.getJobEntryAttributeBoolean( id_jobentry, "create_move_to_folder" ); add_moved_date = rep.getJobEntryAttributeBoolean( id_jobentry, "add_moved_date" ); add_moved_time = rep.getJobEntryAttributeBoolean( id_jobentry, "add_moved_time" ); SpecifyMoveFormat = rep.getJobEntryAttributeBoolean( id_jobentry, "SpecifyMoveFormat" ); simulate = rep.getJobEntryAttributeBoolean( id_jobentry, "simulate" ); // How many arguments? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" ); source_filefolder = new String[argnr]; destination_filefolder = new String[argnr]; wildcard = new String[argnr]; // Read them all... 
for ( int a = 0; a < argnr; a++ ) { source_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_filefolder" ); destination_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "destination_filefolder" ); wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" ); } } catch ( KettleException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobMoveFiles.Error.Exception.UnableLoadRep" ) + id_jobentry, dbe ); } } public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException { try { rep.saveJobEntryAttribute( id_job, getObjectId(), "move_empty_folders", move_empty_folders ); rep.saveJobEntryAttribute( id_job, getObjectId(), "arg_from_previous", arg_from_previous ); rep.saveJobEntryAttribute( id_job, getObjectId(), "include_subfolders", include_subfolders ); rep.saveJobEntryAttribute( id_job, getObjectId(), "add_result_filesname", add_result_filesname ); rep.saveJobEntryAttribute( id_job, getObjectId(), "destination_is_a_file", destination_is_a_file ); rep.saveJobEntryAttribute( id_job, getObjectId(), "create_destination_folder", create_destination_folder ); rep.saveJobEntryAttribute( id_job, getObjectId(), "nr_errors_less_than", nr_errors_less_than ); rep.saveJobEntryAttribute( id_job, getObjectId(), "success_condition", success_condition ); rep.saveJobEntryAttribute( id_job, getObjectId(), "add_date", add_date ); rep.saveJobEntryAttribute( id_job, getObjectId(), "add_time", add_time ); rep.saveJobEntryAttribute( id_job, getObjectId(), "SpecifyFormat", SpecifyFormat ); rep.saveJobEntryAttribute( id_job, getObjectId(), "date_time_format", date_time_format ); rep.saveJobEntryAttribute( id_job, getObjectId(), "AddDateBeforeExtension", AddDateBeforeExtension ); rep.saveJobEntryAttribute( id_job, getObjectId(), "DoNotKeepFolderStructure", DoNotKeepFolderStructure ); rep.saveJobEntryAttribute( id_job, getObjectId(), "iffileexists", iffileexists ); rep.saveJobEntryAttribute( 
id_job, getObjectId(), "destinationFolder", destinationFolder ); rep.saveJobEntryAttribute( id_job, getObjectId(), "ifmovedfileexists", ifmovedfileexists ); rep.saveJobEntryAttribute( id_job, getObjectId(), "moved_date_time_format", moved_date_time_format ); rep.saveJobEntryAttribute( id_job, getObjectId(), "add_moved_date", add_moved_date ); rep.saveJobEntryAttribute( id_job, getObjectId(), "add_moved_time", add_moved_time ); rep.saveJobEntryAttribute( id_job, getObjectId(), "SpecifyMoveFormat", SpecifyMoveFormat ); rep.saveJobEntryAttribute( id_job, getObjectId(), "create_move_to_folder", create_move_to_folder ); rep .saveJobEntryAttribute( id_job, getObjectId(), "AddMovedDateBeforeExtension", AddMovedDateBeforeExtension ); rep.saveJobEntryAttribute( id_job, getObjectId(), "simulate", simulate ); // save the arguments... if ( source_filefolder != null ) { for ( int i = 0; i < source_filefolder.length; i++ ) { rep.saveJobEntryAttribute( id_job, getObjectId(), i, "source_filefolder", source_filefolder[i] ); rep .saveJobEntryAttribute( id_job, getObjectId(), i, "destination_filefolder", destination_filefolder[i] ); rep.saveJobEntryAttribute( id_job, getObjectId(), i, "wildcard", wildcard[i] ); } } } catch ( KettleDatabaseException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobMoveFiles.Error.Exception.UnableSaveRep" ) + id_job, dbe ); } } public Result execute( Result previousResult, int nr ) throws KettleException { Result result = previousResult; List<RowMetaAndData> rows = result.getRows(); RowMetaAndData resultRow = null; result.setNrErrors( 1 ); result.setResult( false ); NrErrors = 0; NrSuccess = 0; successConditionBroken = false; successConditionBrokenExit = false; limitFiles = Const.toInt( environmentSubstitute( getNrErrorsLessThan() ), 10 ); if ( log.isDetailed() ) { if ( simulate ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.SimulationOn" ) ); } if ( include_subfolders ) { logDetailed( BaseMessages.getString( PKG, 
"JobMoveFiles.Log.IncludeSubFoldersOn" ) ); } } String MoveToFolder = environmentSubstitute( destinationFolder ); // Get source and destination files, also wildcard String[] vsourcefilefolder = source_filefolder; String[] vdestinationfilefolder = destination_filefolder; String[] vwildcard = wildcard; if ( iffileexists.equals( "move_file" ) ) { if ( Const.isEmpty( MoveToFolder ) ) { logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.Error.MoveToFolderMissing" ) ); return result; } FileObject folder = null; try { folder = KettleVFS.getFileObject( MoveToFolder, this ); if ( !folder.exists() ) { if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.Error.FolderMissing", MoveToFolder ) ); } if ( create_move_to_folder ) { folder.createFolder(); } else { logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.Error.FolderMissing", MoveToFolder ) ); return result; } } if ( !folder.getType().equals( FileType.FOLDER ) ) { logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.Error.NotFolder", MoveToFolder ) ); return result; } } catch ( Exception e ) { logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.Error.GettingMoveToFolder", MoveToFolder, e .getMessage() ) ); return result; } finally { if ( folder != null ) { try { folder.close(); } catch ( IOException ex ) { /* Ignore */ } } } } if ( arg_from_previous ) { if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.ArgFromPrevious.Found", ( rows != null ? rows .size() : 0 ) + "" ) ); } } if ( arg_from_previous && rows != null ) { for ( int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++ ) { // Success condition broken? 
if ( successConditionBroken ) { if ( !successConditionBrokenExit ) { logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.SuccessConditionbroken", "" + NrErrors ) ); successConditionBrokenExit = true; } result.setNrErrors( NrErrors ); displayResults(); return result; } resultRow = rows.get( iteration ); // Get source and destination file names, also wildcard String vsourcefilefolder_previous = resultRow.getString( 0, null ); String vdestinationfilefolder_previous = resultRow.getString( 1, null ); String vwildcard_previous = resultRow.getString( 2, null ); if ( !Const.isEmpty( vsourcefilefolder_previous ) && !Const.isEmpty( vdestinationfilefolder_previous ) ) { if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.ProcessingRow", vsourcefilefolder_previous, vdestinationfilefolder_previous, vwildcard_previous ) ); } if ( !ProcessFileFolder( vsourcefilefolder_previous, vdestinationfilefolder_previous, vwildcard_previous, parentJob, result, MoveToFolder ) ) { // The move process fail // Update Errors updateErrors(); } } else { if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.IgnoringRow", vsourcefilefolder[iteration], vdestinationfilefolder[iteration], vwildcard[iteration] ) ); } } } } else if ( vsourcefilefolder != null && vdestinationfilefolder != null ) { for ( int i = 0; i < vsourcefilefolder.length && !parentJob.isStopped(); i++ ) { // Success condition broken? 
if ( successConditionBroken ) { if ( !successConditionBrokenExit ) { logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.SuccessConditionbroken", "" + NrErrors ) ); successConditionBrokenExit = true; } result.setNrErrors( NrErrors ); displayResults(); return result; } if ( !Const.isEmpty( vsourcefilefolder[i] ) && !Const.isEmpty( vdestinationfilefolder[i] ) ) { // ok we can process this file/folder if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.ProcessingRow", vsourcefilefolder[i], vdestinationfilefolder[i], vwildcard[i] ) ); } if ( !ProcessFileFolder( vsourcefilefolder[i], vdestinationfilefolder[i], vwildcard[i], parentJob, result, MoveToFolder ) ) { // Update Errors updateErrors(); } } else { if ( log.isDetailed() ) { logDetailed( BaseMessages .getString( PKG, "JobMoveFiles.Log.IgnoringRow", vsourcefilefolder[i], vdestinationfilefolder[i], vwildcard[i] ) ); } } } } // Success Condition result.setNrErrors( NrErrors ); result.setNrLinesWritten( NrSuccess ); if ( getSuccessStatus() ) { result.setResult( true ); } displayResults(); return result; } private void displayResults() { if ( log.isDetailed() ) { logDetailed( "=======================================" ); logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.Info.FilesInError", "" + NrErrors ) ); logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.Info.FilesInSuccess", "" + NrSuccess ) ); logDetailed( "=======================================" ); } } private boolean getSuccessStatus() { boolean retval = false; if ( ( NrErrors == 0 && getSuccessCondition().equals( SUCCESS_IF_NO_ERRORS ) ) || ( NrSuccess >= limitFiles && getSuccessCondition().equals( SUCCESS_IF_AT_LEAST_X_FILES_UN_ZIPPED ) ) || ( NrErrors <= limitFiles && getSuccessCondition().equals( SUCCESS_IF_ERRORS_LESS ) ) ) { retval = true; } return retval; } private boolean ProcessFileFolder( String sourcefilefoldername, String destinationfilefoldername, String wildcard, Job parentJob, Result 
result, String MoveToFolder ) { boolean entrystatus = false; FileObject sourcefilefolder = null; FileObject destinationfilefolder = null; FileObject movetofolderfolder = null; FileObject Currentfile = null; // Get real source, destination file and wildcard String realSourceFilefoldername = environmentSubstitute( sourcefilefoldername ); String realDestinationFilefoldername = environmentSubstitute( destinationfilefoldername ); String realWildcard = environmentSubstitute( wildcard ); try { sourcefilefolder = KettleVFS.getFileObject( realSourceFilefoldername, this ); destinationfilefolder = KettleVFS.getFileObject( realDestinationFilefoldername, this ); if ( !Const.isEmpty( MoveToFolder ) ) { movetofolderfolder = KettleVFS.getFileObject( MoveToFolder, this ); } if ( sourcefilefolder.exists() ) { // Check if destination folder/parent folder exists ! // If user wanted and if destination folder does not exist // PDI will create it if ( CreateDestinationFolder( destinationfilefolder ) ) { // Basic Tests if ( sourcefilefolder.getType().equals( FileType.FOLDER ) && destination_is_a_file ) { // Source is a folder, destination is a file // WARNING !!! CAN NOT MOVE FOLDER TO FILE !!! 
log.logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.Forbidden" ), BaseMessages.getString( PKG, "JobMoveFiles.Log.CanNotMoveFolderToFile", realSourceFilefoldername, realDestinationFilefoldername ) ); // Update Errors updateErrors(); } else { if ( destinationfilefolder.getType().equals( FileType.FOLDER ) && sourcefilefolder.getType().equals( FileType.FILE ) ) { // Source is a file, destination is a folder // return destination short filename String shortfilename = sourcefilefolder.getName().getBaseName(); try { shortfilename = getDestinationFilename( shortfilename ); } catch ( Exception e ) { logError( BaseMessages.getString( PKG, BaseMessages.getString( PKG, "JobMoveFiles.Error.GettingFilename", sourcefilefolder.getName().getBaseName(), e .toString() ) ) ); return entrystatus; } // Move the file to the destination folder String destinationfilenamefull = KettleVFS.getFilename( destinationfilefolder ) + Const.FILE_SEPARATOR + shortfilename; FileObject destinationfile = KettleVFS.getFileObject( destinationfilenamefull, this ); entrystatus = MoveFile( shortfilename, sourcefilefolder, destinationfile, movetofolderfolder, parentJob, result ); return entrystatus; } else if ( sourcefilefolder.getType().equals( FileType.FILE ) && destination_is_a_file ) { // Source is a file, destination is a file FileObject destinationfile = KettleVFS.getFileObject( realDestinationFilefoldername, this ); // return destination short filename String shortfilename = destinationfile.getName().getBaseName(); try { shortfilename = getDestinationFilename( shortfilename ); } catch ( Exception e ) { logError( BaseMessages.getString( PKG, BaseMessages.getString( PKG, "JobMoveFiles.Error.GettingFilename", sourcefilefolder.getName().getBaseName(), e .toString() ) ) ); return entrystatus; } String destinationfilenamefull = KettleVFS.getFilename( destinationfile.getParent() ) + Const.FILE_SEPARATOR + shortfilename; destinationfile = KettleVFS.getFileObject( destinationfilenamefull, this ); 
entrystatus = MoveFile( shortfilename, sourcefilefolder, destinationfile, movetofolderfolder, parentJob, result ); return entrystatus; } else { // Both source and destination are folders if ( log.isDetailed() ) { logDetailed( " " ); logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FetchFolder", sourcefilefolder .toString() ) ); } FileObject[] fileObjects = sourcefilefolder.findFiles( new AllFileSelector() { public boolean traverseDescendents( FileSelectInfo info ) { return true; } public boolean includeFile( FileSelectInfo info ) { FileObject fileObject = info.getFile(); try { if ( fileObject == null ) { return false; } } catch ( Exception ex ) { // Upon error don't process the file. return false; } finally { if ( fileObject != null ) { try { fileObject.close(); } catch ( IOException ex ) { /* Ignore */ } } } return true; } } ); if ( fileObjects != null ) { for ( int j = 0; j < fileObjects.length && !parentJob.isStopped(); j++ ) { // Success condition broken? if ( successConditionBroken ) { if ( !successConditionBrokenExit ) { logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.SuccessConditionbroken", "" + NrErrors ) ); successConditionBrokenExit = true; } return false; } // Fetch files in list one after one ... 
Currentfile = fileObjects[j]; if ( !MoveOneFile( Currentfile, sourcefilefolder, realDestinationFilefoldername, realWildcard, parentJob, result, movetofolderfolder ) ) { // Update Errors updateErrors(); } } } } } entrystatus = true; } else { // Destination Folder or Parent folder is missing logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.DestinationFolderNotFound", realDestinationFilefoldername ) ); } } else { logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.SourceFileNotExists", realSourceFilefoldername ) ); } } catch ( Exception e ) { logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.Exception.MoveProcess", realSourceFilefoldername .toString(), destinationfilefolder.toString(), e.getMessage() ) ); } finally { if ( sourcefilefolder != null ) { try { sourcefilefolder.close(); } catch ( IOException ex ) { /* Ignore */ } } if ( destinationfilefolder != null ) { try { destinationfilefolder.close(); } catch ( IOException ex ) { /* Ignore */ } } if ( Currentfile != null ) { try { Currentfile.close(); } catch ( IOException ex ) { /* Ignore */ } } if ( movetofolderfolder != null ) { try { movetofolderfolder.close(); } catch ( IOException ex ) { /* Ignore */ } } } return entrystatus; } private boolean MoveFile( String shortfilename, FileObject sourcefilename, FileObject destinationfilename, FileObject movetofolderfolder, Job parentJob, Result result ) { FileObject destinationfile = null; boolean retval = false; try { if ( !destinationfilename.exists() ) { if ( !simulate ) { sourcefilename.moveTo( destinationfilename ); } if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileMoved", sourcefilename .getName().toString(), destinationfilename.getName().toString() ) ); } // add filename to result filename if ( add_result_filesname && !iffileexists.equals( "fail" ) && !iffileexists.equals( "do_nothing" ) ) { addFileToResultFilenames( destinationfilename.toString(), result, parentJob ); } updateSuccess(); retval = 
true; } else { if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileExists", destinationfilename.toString() ) ); } if ( iffileexists.equals( "overwrite_file" ) ) { if ( !simulate ) { sourcefilename.moveTo( destinationfilename ); } if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileOverwrite", destinationfilename .getName().toString() ) ); } // add filename to result filename if ( add_result_filesname && !iffileexists.equals( "fail" ) && !iffileexists.equals( "do_nothing" ) ) { addFileToResultFilenames( destinationfilename.toString(), result, parentJob ); } updateSuccess(); retval = true; } else if ( iffileexists.equals( "unique_name" ) ) { String short_filename = shortfilename; // return destination short filename try { short_filename = getMoveDestinationFilename( short_filename, "ddMMyyyy_HHmmssSSS" ); } catch ( Exception e ) { logError( BaseMessages.getString( PKG, BaseMessages.getString( PKG, "JobMoveFiles.Error.GettingFilename", short_filename ) ), e ); return retval; } String movetofilenamefull = destinationfilename.getParent().toString() + Const.FILE_SEPARATOR + short_filename; destinationfile = KettleVFS.getFileObject( movetofilenamefull, this ); if ( !simulate ) { sourcefilename.moveTo( destinationfile ); } if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileMoved", sourcefilename .getName().toString(), destinationfile.getName().toString() ) ); } // add filename to result filename if ( add_result_filesname && !iffileexists.equals( "fail" ) && !iffileexists.equals( "do_nothing" ) ) { addFileToResultFilenames( destinationfile.toString(), result, parentJob ); } updateSuccess(); retval = true; } else if ( iffileexists.equals( "delete_file" ) ) { if ( !simulate ) { destinationfilename.delete(); } if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileDeleted", destinationfilename .getName().toString() ) ); } } else 
if ( iffileexists.equals( "move_file" ) ) { String short_filename = shortfilename; // return destination short filename try { short_filename = getMoveDestinationFilename( short_filename, null ); } catch ( Exception e ) { logError( BaseMessages.getString( PKG, BaseMessages.getString( PKG, "JobMoveFiles.Error.GettingFilename", short_filename ) ), e ); return retval; } String movetofilenamefull = movetofolderfolder.toString() + Const.FILE_SEPARATOR + short_filename; destinationfile = KettleVFS.getFileObject( movetofilenamefull, this ); if ( !destinationfile.exists() ) { if ( !simulate ) { sourcefilename.moveTo( destinationfile ); } if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileMoved", sourcefilename .getName().toString(), destinationfile.getName().toString() ) ); } // add filename to result filename if ( add_result_filesname && !iffileexists.equals( "fail" ) && !iffileexists.equals( "do_nothing" ) ) { addFileToResultFilenames( destinationfile.toString(), result, parentJob ); } } else { if ( ifmovedfileexists.equals( "overwrite_file" ) ) { if ( !simulate ) { sourcefilename.moveTo( destinationfile ); } if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileOverwrite", destinationfile .getName().toString() ) ); } // add filename to result filename if ( add_result_filesname && !iffileexists.equals( "fail" ) && !iffileexists.equals( "do_nothing" ) ) { addFileToResultFilenames( destinationfile.toString(), result, parentJob ); } updateSuccess(); retval = true; } else if ( ifmovedfileexists.equals( "unique_name" ) ) { SimpleDateFormat daf = new SimpleDateFormat(); Date now = new Date(); daf.applyPattern( "ddMMyyyy_HHmmssSSS" ); String dt = daf.format( now ); short_filename += "_" + dt; String destinationfilenamefull = movetofolderfolder.toString() + Const.FILE_SEPARATOR + short_filename; destinationfile = KettleVFS.getFileObject( destinationfilenamefull, this ); if ( !simulate ) { 
sourcefilename.moveTo( destinationfile ); } if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileMoved", destinationfile .getName().toString() ) ); } // add filename to result filename if ( add_result_filesname && !iffileexists.equals( "fail" ) && !iffileexists.equals( "do_nothing" ) ) { addFileToResultFilenames( destinationfile.toString(), result, parentJob ); } updateSuccess(); retval = true; } else if ( ifmovedfileexists.equals( "fail" ) ) { // Update Errors updateErrors(); } } } else if ( iffileexists.equals( "fail" ) ) { // Update Errors updateErrors(); } } } catch ( Exception e ) { logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.Exception.MoveProcessError", sourcefilename .toString(), destinationfilename.toString(), e.getMessage() ) ); updateErrors(); } finally { if ( destinationfile != null ) { try { destinationfile.close(); } catch ( IOException ex ) { /* Ignore */ } } } return retval; } private boolean MoveOneFile( FileObject Currentfile, FileObject sourcefilefolder, String realDestinationFilefoldername, String realWildcard, Job parentJob, Result result, FileObject movetofolderfolder ) { boolean entrystatus = false; FileObject file_name = null; try { if ( !Currentfile.toString().equals( sourcefilefolder.toString() ) ) { // Pass over the Base folder itself // return destination short filename String sourceshortfilename = Currentfile.getName().getBaseName(); String shortfilename = sourceshortfilename; try { shortfilename = getDestinationFilename( sourceshortfilename ); } catch ( Exception e ) { logError( BaseMessages.getString( PKG, BaseMessages.getString( PKG, "JobMoveFiles.Error.GettingFilename", Currentfile.getName().getBaseName(), e.toString() ) ) ); return entrystatus; } int lenCurrent = sourceshortfilename.length(); String short_filename_from_basefolder = shortfilename; if ( !isDoNotKeepFolderStructure() ) { short_filename_from_basefolder = Currentfile.toString().substring( 
sourcefilefolder.toString().length(), Currentfile.toString().length() ); } short_filename_from_basefolder = short_filename_from_basefolder.substring( 0, short_filename_from_basefolder.length() - lenCurrent ) + shortfilename; // Built destination filename file_name = KettleVFS.getFileObject( realDestinationFilefoldername + Const.FILE_SEPARATOR + short_filename_from_basefolder, this ); if ( !Currentfile.getParent().toString().equals( sourcefilefolder.toString() ) ) { // Not in the Base Folder..Only if include sub folders if ( include_subfolders ) { // Folders..only if include subfolders if ( Currentfile.getType() == FileType.FOLDER ) { if ( include_subfolders && move_empty_folders && Const.isEmpty( wildcard ) ) { entrystatus = MoveFile( shortfilename, Currentfile, file_name, movetofolderfolder, parentJob, result ); } } else { if ( GetFileWildcard( sourceshortfilename, realWildcard ) ) { entrystatus = MoveFile( shortfilename, Currentfile, file_name, movetofolderfolder, parentJob, result ); } } } } else { // In the Base Folder... 
// Folders..only if include subfolders if ( Currentfile.getType() == FileType.FOLDER ) { if ( include_subfolders && move_empty_folders && Const.isEmpty( wildcard ) ) { entrystatus = MoveFile( shortfilename, Currentfile, file_name, movetofolderfolder, parentJob, result ); } } else { // file...Check if exists if ( GetFileWildcard( sourceshortfilename, realWildcard ) ) { entrystatus = MoveFile( shortfilename, Currentfile, file_name, movetofolderfolder, parentJob, result ); } } } } entrystatus = true; } catch ( Exception e ) { logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.Error", e.toString() ) ); } finally { if ( file_name != null ) { try { file_name.close(); } catch ( IOException ex ) { /* Ignore */ } } } return entrystatus; } private void updateErrors() { NrErrors++; if ( checkIfSuccessConditionBroken() ) { // Success condition was broken successConditionBroken = true; } } private boolean checkIfSuccessConditionBroken() { boolean retval = false; if ( ( NrErrors > 0 && getSuccessCondition().equals( SUCCESS_IF_NO_ERRORS ) ) || ( NrErrors >= limitFiles && getSuccessCondition().equals( SUCCESS_IF_ERRORS_LESS ) ) ) { retval = true; } return retval; } private void updateSuccess() { NrSuccess++; } private void addFileToResultFilenames( String fileaddentry, Result result, Job parentJob ) { try { ResultFile resultFile = new ResultFile( ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject( fileaddentry, this ), parentJob .getJobname(), toString() ); result.getResultFiles().put( resultFile.getFile().toString(), resultFile ); if ( log.isDebug() ) { logDebug( " ------ " ); logDebug( BaseMessages.getString( PKG, "JobMoveFiles.Log.FileAddedToResultFilesName", fileaddentry ) ); } } catch ( Exception e ) { log.logError( BaseMessages.getString( PKG, "JobMoveFiles.Error.AddingToFilenameResult" ), fileaddentry + "" + e.getMessage() ); } } private boolean CreateDestinationFolder( FileObject filefolder ) { FileObject folder = null; try { if ( destination_is_a_file ) { folder 
= filefolder.getParent(); } else { folder = filefolder; } if ( !folder.exists() ) { if ( create_destination_folder ) { if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FolderNotExist", folder .getName().toString() ) ); } folder.createFolder(); if ( log.isDetailed() ) { logDetailed( BaseMessages.getString( PKG, "JobMoveFiles.Log.FolderWasCreated", folder .getName().toString() ) ); } } else { logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.FolderNotExist", folder.getName().toString() ) ); return false; } } return true; } catch ( Exception e ) { logError( BaseMessages.getString( PKG, "JobMoveFiles.Log.CanNotCreateParentFolder", folder .getName().toString() ), e ); } finally { if ( folder != null ) { try { folder.close(); } catch ( Exception ex ) { /* Ignore */ } } } return false; } /********************************************************** * * @param selectedfile * @param wildcard * @return True if the selectedfile matches the wildcard **********************************************************/ private boolean GetFileWildcard( String selectedfile, String wildcard ) { Pattern pattern = null; boolean getIt = true; if ( !Const.isEmpty( wildcard ) ) { pattern = Pattern.compile( wildcard ); // First see if the file matches the regular expression! if ( pattern != null ) { Matcher matcher = pattern.matcher( selectedfile ); getIt = matcher.matches(); } } return getIt; } private String getDestinationFilename( String shortsourcefilename ) throws Exception { String shortfilename = shortsourcefilename; int lenstring = shortsourcefilename.length(); int lastindexOfDot = shortfilename.lastIndexOf( '.' 
); if ( lastindexOfDot == -1 ) { lastindexOfDot = lenstring; } if ( isAddDateBeforeExtension() ) { shortfilename = shortfilename.substring( 0, lastindexOfDot ); } SimpleDateFormat daf = new SimpleDateFormat(); Date now = new Date(); if ( isSpecifyFormat() && !Const.isEmpty( getDateTimeFormat() ) ) { daf.applyPattern( getDateTimeFormat() ); String dt = daf.format( now ); shortfilename += dt; } else { if ( isAddDate() ) { daf.applyPattern( "yyyyMMdd" ); String d = daf.format( now ); shortfilename += "_" + d; } if ( isAddTime() ) { daf.applyPattern( "HHmmssSSS" ); String t = daf.format( now ); shortfilename += "_" + t; } } if ( isAddDateBeforeExtension() ) { shortfilename += shortsourcefilename.substring( lastindexOfDot, lenstring ); } return shortfilename; } private String getMoveDestinationFilename( String shortsourcefilename, String DateFormat ) throws Exception { String shortfilename = shortsourcefilename; int lenstring = shortsourcefilename.length(); int lastindexOfDot = shortfilename.lastIndexOf( '.' 
); if ( lastindexOfDot == -1 ) { lastindexOfDot = lenstring; } if ( isAddMovedDateBeforeExtension() ) { shortfilename = shortfilename.substring( 0, lastindexOfDot ); } SimpleDateFormat daf = new SimpleDateFormat(); Date now = new Date(); if ( DateFormat != null ) { daf.applyPattern( DateFormat ); String dt = daf.format( now ); shortfilename += dt; } else { if ( isSpecifyMoveFormat() && !Const.isEmpty( getMovedDateTimeFormat() ) ) { daf.applyPattern( getMovedDateTimeFormat() ); String dt = daf.format( now ); shortfilename += dt; } else { if ( isAddMovedDate() ) { daf.applyPattern( "yyyyMMdd" ); String d = daf.format( now ); shortfilename += "_" + d; } if ( isAddMovedTime() ) { daf.applyPattern( "HHmmssSSS" ); String t = daf.format( now ); shortfilename += "_" + t; } } } if ( isAddMovedDateBeforeExtension() ) { shortfilename += shortsourcefilename.substring( lastindexOfDot, lenstring ); } return shortfilename; } public void setAddDate( boolean adddate ) { this.add_date = adddate; } public boolean isAddDate() { return add_date; } public boolean isAddMovedDate() { return add_moved_date; } public void setAddMovedDate( boolean add_moved_date ) { this.add_moved_date = add_moved_date; } public boolean isAddMovedTime() { return add_moved_time; } public void setAddMovedTime( boolean add_moved_time ) { this.add_moved_time = add_moved_time; } public void setIfFileExists( String iffileexists ) { this.iffileexists = iffileexists; } public String getIfFileExists() { return iffileexists; } public void setIfMovedFileExists( String ifmovedfileexists ) { this.ifmovedfileexists = ifmovedfileexists; } public String getIfMovedFileExists() { return ifmovedfileexists; } public void setAddTime( boolean addtime ) { this.add_time = addtime; } public boolean isAddTime() { return add_time; } public void setAddDateBeforeExtension( boolean AddDateBeforeExtension ) { this.AddDateBeforeExtension = AddDateBeforeExtension; } public void setAddMovedDateBeforeExtension( boolean 
AddMovedDateBeforeExtension ) { this.AddMovedDateBeforeExtension = AddMovedDateBeforeExtension; } public boolean isSpecifyFormat() { return SpecifyFormat; } public void setSpecifyFormat( boolean SpecifyFormat ) { this.SpecifyFormat = SpecifyFormat; } public void setSpecifyMoveFormat( boolean SpecifyMoveFormat ) { this.SpecifyMoveFormat = SpecifyMoveFormat; } public boolean isSpecifyMoveFormat() { return SpecifyMoveFormat; } public String getDateTimeFormat() { return date_time_format; } public void setDateTimeFormat( String date_time_format ) { this.date_time_format = date_time_format; } public String getMovedDateTimeFormat() { return moved_date_time_format; } public void setMovedDateTimeFormat( String moved_date_time_format ) { this.moved_date_time_format = moved_date_time_format; } public boolean isAddDateBeforeExtension() { return AddDateBeforeExtension; } public boolean isAddMovedDateBeforeExtension() { return AddMovedDateBeforeExtension; } public boolean isDoNotKeepFolderStructure() { return DoNotKeepFolderStructure; } public void setDestinationFolder( String destinationFolder ) { this.destinationFolder = destinationFolder; } public String getDestinationFolder() { return destinationFolder; } public void setDoNotKeepFolderStructure( boolean DoNotKeepFolderStructure ) { this.DoNotKeepFolderStructure = DoNotKeepFolderStructure; } public void setMoveEmptyFolders( boolean move_empty_foldersin ) { this.move_empty_folders = move_empty_foldersin; } public void setIncludeSubfolders( boolean include_subfoldersin ) { this.include_subfolders = include_subfoldersin; } public void setAddresultfilesname( boolean add_result_filesnamein ) { this.add_result_filesname = add_result_filesnamein; } public void setArgFromPrevious( boolean argfrompreviousin ) { this.arg_from_previous = argfrompreviousin; } public void setDestinationIsAFile( boolean destination_is_a_file ) { this.destination_is_a_file = destination_is_a_file; } public void setCreateDestinationFolder( boolean 
create_destination_folder ) { this.create_destination_folder = create_destination_folder; } public void setCreateMoveToFolder( boolean create_move_to_folder ) { this.create_move_to_folder = create_move_to_folder; } public void setNrErrorsLessThan( String nr_errors_less_than ) { this.nr_errors_less_than = nr_errors_less_than; } public String getNrErrorsLessThan() { return nr_errors_less_than; } public void setSimulate( boolean simulate ) { this.simulate = simulate; } public void setSuccessCondition( String success_condition ) { this.success_condition = success_condition; } public String getSuccessCondition() { return success_condition; } public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space, Repository repository, IMetaStore metaStore ) { boolean res = andValidator().validate( this, "arguments", remarks, putValidators( notNullValidator() ) ); if ( res == false ) { return; } ValidatorContext ctx = new ValidatorContext(); putVariableSpace( ctx, getVariables() ); putValidators( ctx, notNullValidator(), fileExistsValidator() ); for ( int i = 0; i < source_filefolder.length; i++ ) { andValidator().validate( this, "arguments[" + i + "]", remarks, ctx ); } } public boolean evaluates() { return true; } }
PDI-12539 - Move Files Job Entry Deletes Wrong File
engine/src/org/pentaho/di/job/entries/movefiles/JobEntryMoveFiles.java
PDI-12539 - Move Files Job Entry Deletes Wrong File
Java
apache-2.0
e4d98e5c19bc75baa67f9c9aa5a66bbc6d673b1b
0
GoogleCloudPlatform/spring-cloud-gcp,GoogleCloudPlatform/spring-cloud-gcp,GoogleCloudPlatform/spring-cloud-gcp,GoogleCloudPlatform/spring-cloud-gcp,GoogleCloudPlatform/spring-cloud-gcp
/* * Copyright 2017-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.spring.logging; import static org.assertj.core.api.Assertions.assertThat; import java.util.List; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.web.servlet.config.annotation.InterceptorRegistry; /** Tests for the logging-web-mvc configurer. */ @ExtendWith(MockitoExtension.class) class LoggingWebMvcConfigurerTests { @Mock private TraceIdLoggingWebMvcInterceptor interceptor; @Test void testAddInterceptors() { LoggingWebMvcConfigurer adapter = new LoggingWebMvcConfigurer(this.interceptor, () -> "remission"); TestInterceptorRegistry registry = new TestInterceptorRegistry(); adapter.addInterceptors(registry); assertThat(registry.doGetInterceptors()).hasSize(1); assertThat(registry.doGetInterceptors().get(0)).isEqualTo(this.interceptor); } /** Test interceptor registry that makes interceptors list accessible. */ private static class TestInterceptorRegistry extends InterceptorRegistry { public List<Object> doGetInterceptors() { return super.getInterceptors(); } } }
spring-cloud-gcp-logging/src/test/java/com/google/cloud/spring/logging/LoggingWebMvcConfigurerTests.java
/* * Copyright 2017-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.spring.logging; import static org.assertj.core.api.Assertions.assertThat; import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.springframework.web.servlet.config.annotation.InterceptorRegistry; /** Tests for the logging-web-mvc configurer. */ @RunWith(MockitoJUnitRunner.class) public class LoggingWebMvcConfigurerTests { @Mock private TraceIdLoggingWebMvcInterceptor interceptor; @Test public void testAddInterceptors() { LoggingWebMvcConfigurer adapter = new LoggingWebMvcConfigurer(this.interceptor, () -> "remission"); TestInterceptorRegistry registry = new TestInterceptorRegistry(); adapter.addInterceptors(registry); assertThat(registry.doGetInterceptors()).hasSize(1); assertThat(registry.doGetInterceptors().get(0)).isEqualTo(this.interceptor); } /** Test interceptor registry that makes interceptors list accessible. */ private static class TestInterceptorRegistry extends InterceptorRegistry { public List<Object> doGetInterceptors() { return super.getInterceptors(); } } }
Migrating the tests to JUnit5 (#1011)
spring-cloud-gcp-logging/src/test/java/com/google/cloud/spring/logging/LoggingWebMvcConfigurerTests.java
Migrating the tests to JUnit5 (#1011)
Java
apache-2.0
40ca96ef10023788e09516469787d40f825e1174
0
petermr/cmine,petermr/cmine
package org.xmlcml.cmine.args; import java.io.File; import java.io.IOException; import org.apache.commons.io.FileUtils; import org.apache.log4j.Logger; import org.junit.Ignore; import org.junit.Test; import junit.framework.Assert; public class DefaultArgProcessorTest { private static final Logger LOG = Logger.getLogger(DefaultArgProcessorTest.class); static { LOG.setLevel(org.apache.log4j.Level.DEBUG); } @Test @Ignore // side-effects creates files public void testArgs() { String[] args = { "-i", "foo", "bar", "-o", "plugh", }; DefaultArgProcessor argProcessor = new DefaultArgProcessor(); argProcessor.parseArgs(args); Assert.assertEquals("input", 2, argProcessor.getInputList().size()); Assert.assertEquals("input", "foo", argProcessor.getInputList().get(0)); Assert.assertEquals("input", "bar", argProcessor.getInputList().get(1)); Assert.assertEquals("output", "plugh", argProcessor.getOutput()); } @Test public void testSingleWildcards() { String[] args = { "-i", "foo{1:3}bof", "bar{a|b|zzz}plugh", }; DefaultArgProcessor argProcessor = new DefaultArgProcessor(); argProcessor.parseArgs(args); Assert.assertEquals("input", 2, argProcessor.getInputList().size()); Assert.assertEquals("input", "foo{1:3}bof", argProcessor.getInputList().get(1)); Assert.assertEquals("input", "bar{a|b|zzz}plugh", argProcessor.getInputList().get(0)); argProcessor.expandWildcardsExhaustively(); Assert.assertEquals("input", 6, argProcessor.getInputList().size()); Assert.assertEquals("input", "baraplugh", argProcessor.getInputList().get(0)); Assert.assertEquals("input", "barbplugh", argProcessor.getInputList().get(1)); Assert.assertEquals("input", "barzzzplugh", argProcessor.getInputList().get(2)); Assert.assertEquals("input", "foo1bof", argProcessor.getInputList().get(3)); Assert.assertEquals("input", "foo2bof", argProcessor.getInputList().get(4)); Assert.assertEquals("input", "foo3bof", argProcessor.getInputList().get(5)); } @Test public void testMultipleWildcards() { String[] args = { "-i", 
"foo{1:3}bof{3:6}plugh", }; DefaultArgProcessor argProcessor = new DefaultArgProcessor(); argProcessor.parseArgs(args); Assert.assertEquals("input", 1, argProcessor.getInputList().size()); Assert.assertEquals("input", "foo{1:3}bof{3:6}plugh", argProcessor.getInputList().get(0)); argProcessor.expandWildcardsExhaustively(); Assert.assertEquals("input", 12, argProcessor.getInputList().size()); Assert.assertEquals("input", "foo1bof3plugh", argProcessor.getInputList().get(0)); } @Test public void testArgCounts() { String[] args = {"-o", "foo"}; new DefaultArgProcessor().parseArgs(args); try { args = new String[]{"-o", "foo", "bar"}; new DefaultArgProcessor().parseArgs(args); } catch (Exception e) { Assert.assertEquals("too many arguments", "cannot process argument: -o (IllegalArgumentException: --output; argument count (2) is not compatible with {1,1})", e.getMessage()); } } @Test public void testMakeDocs() { String args = "--makedocs"; DefaultArgProcessor argProcessor = new DefaultArgProcessor(); argProcessor.parseArgs(args); argProcessor.runAndOutput(); } @Test public void testVersion() { DefaultArgProcessor argProcessor = new DefaultArgProcessor(); argProcessor.parseArgs("--version"); } @Test public void testProject() { DefaultArgProcessor argProcessor = new DefaultArgProcessor(); try { argProcessor.parseArgs("--project"); Assert.fail("should trap zero arguments"); } catch (Exception e) { // OK } argProcessor = new DefaultArgProcessor(); argProcessor.parseArgs("--project foo"); } @Test public void testLog() throws IOException { DefaultArgProcessor argProcessor = new DefaultArgProcessor(); File targetFile = new File("target/test/log/"); targetFile.mkdirs(); // dummy file FileUtils.write(new File(targetFile, "fulltext.txt"), "fulltext"); argProcessor.parseArgs("-q "+targetFile+" -i fulltext.txt --c.test --log"); argProcessor.runAndOutput(); } }
src/test/java/org/xmlcml/cmine/args/DefaultArgProcessorTest.java
package org.xmlcml.cmine.args; import java.io.File; import java.io.IOException; import org.apache.commons.io.FileUtils; import org.apache.log4j.Logger; import org.junit.Ignore; import org.junit.Test; import junit.framework.Assert; public class DefaultArgProcessorTest { private static final Logger LOG = Logger.getLogger(DefaultArgProcessorTest.class); static { LOG.setLevel(org.apache.log4j.Level.DEBUG); } @Test @Ignore // side-effects creates files public void testArgs() { String[] args = { "-i", "foo", "bar", "-o", "plugh", }; DefaultArgProcessor argProcessor = new DefaultArgProcessor(); argProcessor.parseArgs(args); Assert.assertEquals("input", 2, argProcessor.getInputList().size()); Assert.assertEquals("input", "foo", argProcessor.getInputList().get(0)); Assert.assertEquals("input", "bar", argProcessor.getInputList().get(1)); Assert.assertEquals("output", "plugh", argProcessor.getOutput()); } @Test public void testSingleWildcards() { String[] args = { "-i", "foo{1:3}bof", "bar{a|b|zzz}plugh", }; DefaultArgProcessor argProcessor = new DefaultArgProcessor(); argProcessor.parseArgs(args); Assert.assertEquals("input", 2, argProcessor.getInputList().size()); Assert.assertEquals("input", "foo{1:3}bof", argProcessor.getInputList().get(0)); Assert.assertEquals("input", "bar{a|b|zzz}plugh", argProcessor.getInputList().get(1)); argProcessor.expandWildcardsExhaustively(); Assert.assertEquals("input", 6, argProcessor.getInputList().size()); Assert.assertEquals("input", "foo1bof", argProcessor.getInputList().get(0)); Assert.assertEquals("input", "foo2bof", argProcessor.getInputList().get(1)); Assert.assertEquals("input", "foo3bof", argProcessor.getInputList().get(2)); Assert.assertEquals("input", "baraplugh", argProcessor.getInputList().get(3)); Assert.assertEquals("input", "barbplugh", argProcessor.getInputList().get(4)); Assert.assertEquals("input", "barzzzplugh", argProcessor.getInputList().get(5)); } @Test public void testMultipleWildcards() { String[] args = { "-i", 
"foo{1:3}bof{3:6}plugh", }; DefaultArgProcessor argProcessor = new DefaultArgProcessor(); argProcessor.parseArgs(args); Assert.assertEquals("input", 1, argProcessor.getInputList().size()); Assert.assertEquals("input", "foo{1:3}bof{3:6}plugh", argProcessor.getInputList().get(0)); argProcessor.expandWildcardsExhaustively(); Assert.assertEquals("input", 12, argProcessor.getInputList().size()); Assert.assertEquals("input", "foo1bof3plugh", argProcessor.getInputList().get(0)); } @Test public void testArgCounts() { String[] args = {"-o", "foo"}; new DefaultArgProcessor().parseArgs(args); try { args = new String[]{"-o", "foo", "bar"}; new DefaultArgProcessor().parseArgs(args); } catch (Exception e) { Assert.assertEquals("too many arguments", "cannot process argument: -o (IllegalArgumentException: --output; argument count (2) is not compatible with {1,1})", e.getMessage()); } } @Test public void testMakeDocs() { String args = "--makedocs"; DefaultArgProcessor argProcessor = new DefaultArgProcessor(); argProcessor.parseArgs(args); argProcessor.runAndOutput(); } @Test public void testVersion() { DefaultArgProcessor argProcessor = new DefaultArgProcessor(); argProcessor.parseArgs("--version"); } @Test public void testProject() { DefaultArgProcessor argProcessor = new DefaultArgProcessor(); try { argProcessor.parseArgs("--project"); Assert.fail("should trap zero arguments"); } catch (Exception e) { // OK } argProcessor = new DefaultArgProcessor(); argProcessor.parseArgs("--project foo"); } @Test public void testLog() throws IOException { DefaultArgProcessor argProcessor = new DefaultArgProcessor(); File targetFile = new File("target/test/log/"); targetFile.mkdirs(); // dummy file FileUtils.write(new File(targetFile, "fulltext.txt"), "fulltext"); argProcessor.parseArgs("-q "+targetFile+" -i fulltext.txt --c.test --log"); argProcessor.runAndOutput(); } }
added sort
src/test/java/org/xmlcml/cmine/args/DefaultArgProcessorTest.java
added sort
Java
apache-2.0
aaee316d38ea18240fb2adc4c8f2f6fddd31b3fb
0
isharac/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,isharac/carbon-apimgt,wso2/carbon-apimgt,chamilaadhi/carbon-apimgt,praminda/carbon-apimgt,chamilaadhi/carbon-apimgt,isharac/carbon-apimgt,wso2/carbon-apimgt,isharac/carbon-apimgt,tharindu1st/carbon-apimgt,malinthaprasan/carbon-apimgt,wso2/carbon-apimgt,tharindu1st/carbon-apimgt,ruks/carbon-apimgt,malinthaprasan/carbon-apimgt,chamilaadhi/carbon-apimgt,malinthaprasan/carbon-apimgt,ruks/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,prasa7/carbon-apimgt,praminda/carbon-apimgt,tharindu1st/carbon-apimgt,ruks/carbon-apimgt,wso2/carbon-apimgt,chamilaadhi/carbon-apimgt,prasa7/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,praminda/carbon-apimgt,tharindu1st/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,prasa7/carbon-apimgt,prasa7/carbon-apimgt,malinthaprasan/carbon-apimgt,ruks/carbon-apimgt
/* * Copyright (c) 2005-2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.apimgt.impl; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.gson.Gson; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMException; import org.apache.axiom.om.util.AXIOMUtil; import org.apache.axis2.Constants; import org.apache.axis2.util.JavaUtils; import org.apache.commons.collections.MapUtils; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.solr.client.solrj.util.ClientUtils; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; import org.wso2.carbon.CarbonConstants; import org.wso2.carbon.apimgt.api.APIDefinition; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.APIMgtResourceAlreadyExistsException; import org.wso2.carbon.apimgt.api.APIMgtResourceNotFoundException; import org.wso2.carbon.apimgt.api.APIProvider; import org.wso2.carbon.apimgt.api.ErrorItem; import org.wso2.carbon.apimgt.api.ExceptionCodes; import 
org.wso2.carbon.apimgt.api.FaultGatewaysException; import org.wso2.carbon.apimgt.api.MonetizationException; import org.wso2.carbon.apimgt.api.UnsupportedPolicyTypeException; import org.wso2.carbon.apimgt.api.WorkflowResponse; import org.wso2.carbon.apimgt.api.doc.model.APIResource; import org.wso2.carbon.apimgt.api.dto.CertificateInformationDTO; import org.wso2.carbon.apimgt.api.dto.CertificateMetadataDTO; import org.wso2.carbon.apimgt.api.dto.ClientCertificateDTO; import org.wso2.carbon.apimgt.api.dto.KeyManagerConfigurationDTO; import org.wso2.carbon.apimgt.api.dto.UserApplicationAPIUsage; import org.wso2.carbon.apimgt.api.model.API; import org.wso2.carbon.apimgt.api.model.APICategory; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.APIInfo; import org.wso2.carbon.apimgt.api.model.APIProduct; import org.wso2.carbon.apimgt.api.model.APIProductIdentifier; import org.wso2.carbon.apimgt.api.model.APIProductResource; import org.wso2.carbon.apimgt.api.model.APIRevision; import org.wso2.carbon.apimgt.api.model.APIRevisionDeployment; import org.wso2.carbon.apimgt.api.model.APIStateChangeResponse; import org.wso2.carbon.apimgt.api.model.APIStore; import org.wso2.carbon.apimgt.api.model.ApiTypeWrapper; import org.wso2.carbon.apimgt.api.model.BlockConditionsDTO; import org.wso2.carbon.apimgt.api.model.Comment; import org.wso2.carbon.apimgt.api.model.DeployedAPIRevision; import org.wso2.carbon.apimgt.api.model.Documentation; import org.wso2.carbon.apimgt.api.model.Documentation.DocumentSourceType; import org.wso2.carbon.apimgt.api.model.Documentation.DocumentVisibility; import org.wso2.carbon.apimgt.api.model.DocumentationContent; import org.wso2.carbon.apimgt.api.model.DocumentationType; import org.wso2.carbon.apimgt.api.model.EndpointSecurity; import org.wso2.carbon.apimgt.api.model.Identifier; import org.wso2.carbon.apimgt.api.model.KeyManager; import org.wso2.carbon.apimgt.api.model.LifeCycleEvent; import 
org.wso2.carbon.apimgt.api.model.Mediation; import org.wso2.carbon.apimgt.api.model.Monetization; import org.wso2.carbon.apimgt.api.model.Provider; import org.wso2.carbon.apimgt.api.model.ResourceFile; import org.wso2.carbon.apimgt.api.model.ResourcePath; import org.wso2.carbon.apimgt.api.model.Scope; import org.wso2.carbon.apimgt.api.model.SharedScopeUsage; import org.wso2.carbon.apimgt.api.model.SubscribedAPI; import org.wso2.carbon.apimgt.api.model.Subscriber; import org.wso2.carbon.apimgt.api.model.SwaggerData; import org.wso2.carbon.apimgt.api.model.Tier; import org.wso2.carbon.apimgt.api.model.URITemplate; import org.wso2.carbon.apimgt.api.model.Usage; import org.wso2.carbon.apimgt.api.model.policy.APIPolicy; import org.wso2.carbon.apimgt.api.model.policy.ApplicationPolicy; import org.wso2.carbon.apimgt.api.model.policy.Condition; import org.wso2.carbon.apimgt.api.model.policy.GlobalPolicy; import org.wso2.carbon.apimgt.api.model.policy.Pipeline; import org.wso2.carbon.apimgt.api.model.policy.Policy; import org.wso2.carbon.apimgt.api.model.policy.PolicyConstants; import org.wso2.carbon.apimgt.api.model.policy.SubscriptionPolicy; import org.wso2.carbon.apimgt.impl.certificatemgt.CertificateManager; import org.wso2.carbon.apimgt.impl.certificatemgt.CertificateManagerImpl; import org.wso2.carbon.apimgt.impl.certificatemgt.ResponseCode; import org.wso2.carbon.apimgt.impl.dao.ApiMgtDAO; import org.wso2.carbon.apimgt.impl.dao.GatewayArtifactsMgtDAO; import org.wso2.carbon.apimgt.impl.dao.ServiceCatalogDAO; import org.wso2.carbon.apimgt.impl.definitions.GraphQLSchemaDefinition; import org.wso2.carbon.apimgt.impl.definitions.OAS3Parser; import org.wso2.carbon.apimgt.impl.definitions.OASParserUtil; import org.wso2.carbon.apimgt.impl.dto.JwtTokenInfoDTO; import org.wso2.carbon.apimgt.impl.dto.KeyManagerDto; import org.wso2.carbon.apimgt.impl.dto.SubscribedApiDTO; import org.wso2.carbon.apimgt.impl.dto.ThrottleProperties; import 
org.wso2.carbon.apimgt.impl.dto.TierPermissionDTO; import org.wso2.carbon.apimgt.impl.dto.WorkflowDTO; import org.wso2.carbon.apimgt.impl.dto.WorkflowProperties; import org.wso2.carbon.apimgt.impl.factory.KeyManagerHolder; import org.wso2.carbon.apimgt.impl.gatewayartifactsynchronizer.ArtifactSaver; import org.wso2.carbon.apimgt.impl.gatewayartifactsynchronizer.exception.ArtifactSynchronizerException; import org.wso2.carbon.apimgt.impl.importexport.APIImportExportException; import org.wso2.carbon.apimgt.impl.importexport.ExportFormat; import org.wso2.carbon.apimgt.impl.importexport.ImportExportAPI; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.monetization.DefaultMonetizationImpl; import org.wso2.carbon.apimgt.impl.notification.NotificationDTO; import org.wso2.carbon.apimgt.impl.notification.NotificationExecutor; import org.wso2.carbon.apimgt.impl.notification.NotifierConstants; import org.wso2.carbon.apimgt.impl.notification.exception.NotificationException; import org.wso2.carbon.apimgt.impl.notifier.events.APIEvent; import org.wso2.carbon.apimgt.impl.notifier.events.APIPolicyEvent; import org.wso2.carbon.apimgt.impl.notifier.events.ApplicationPolicyEvent; import org.wso2.carbon.apimgt.impl.notifier.events.CertificateEvent; import org.wso2.carbon.apimgt.impl.notifier.events.GlobalPolicyEvent; import org.wso2.carbon.apimgt.impl.notifier.events.ScopeEvent; import org.wso2.carbon.apimgt.impl.notifier.events.SubscriptionEvent; import org.wso2.carbon.apimgt.impl.notifier.events.SubscriptionPolicyEvent; import org.wso2.carbon.apimgt.impl.publishers.WSO2APIPublisher; import org.wso2.carbon.apimgt.impl.recommendationmgt.RecommendationEnvironment; import org.wso2.carbon.apimgt.impl.recommendationmgt.RecommenderDetailsExtractor; import org.wso2.carbon.apimgt.impl.recommendationmgt.RecommenderEventPublisher; import org.wso2.carbon.apimgt.impl.token.ApiKeyGenerator; import 
org.wso2.carbon.apimgt.impl.token.ClaimsRetriever; import org.wso2.carbon.apimgt.impl.token.InternalAPIKeyGenerator; import org.wso2.carbon.apimgt.impl.utils.APIAuthenticationAdminClient; import org.wso2.carbon.apimgt.impl.utils.APIMWSDLReader; import org.wso2.carbon.apimgt.impl.utils.APINameComparator; import org.wso2.carbon.apimgt.impl.utils.APIProductNameComparator; import org.wso2.carbon.apimgt.impl.utils.APIStoreNameComparator; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.apimgt.impl.utils.APIVersionComparator; import org.wso2.carbon.apimgt.impl.utils.APIVersionStringComparator; import org.wso2.carbon.apimgt.impl.utils.ContentSearchResultNameComparator; import org.wso2.carbon.apimgt.impl.workflow.APIStateWorkflowDTO; import org.wso2.carbon.apimgt.impl.workflow.WorkflowConstants; import org.wso2.carbon.apimgt.impl.workflow.WorkflowException; import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutor; import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutorFactory; import org.wso2.carbon.apimgt.impl.workflow.WorkflowStatus; import org.wso2.carbon.apimgt.impl.wsdl.WSDLProcessor; import org.wso2.carbon.apimgt.persistence.LCManagerFactory; import org.wso2.carbon.apimgt.persistence.dto.DocumentContent; import org.wso2.carbon.apimgt.persistence.dto.DocumentSearchContent; import org.wso2.carbon.apimgt.persistence.dto.DocumentSearchResult; import org.wso2.carbon.apimgt.persistence.dto.MediationInfo; import org.wso2.carbon.apimgt.persistence.dto.Organization; import org.wso2.carbon.apimgt.persistence.dto.PublisherAPI; import org.wso2.carbon.apimgt.persistence.dto.PublisherAPIInfo; import org.wso2.carbon.apimgt.persistence.dto.PublisherAPIProduct; import org.wso2.carbon.apimgt.persistence.dto.PublisherAPIProductInfo; import org.wso2.carbon.apimgt.persistence.dto.PublisherAPIProductSearchResult; import org.wso2.carbon.apimgt.persistence.dto.PublisherAPISearchResult; import 
org.wso2.carbon.apimgt.persistence.dto.PublisherContentSearchResult; import org.wso2.carbon.apimgt.persistence.dto.PublisherSearchContent; import org.wso2.carbon.apimgt.persistence.dto.SearchContent; import org.wso2.carbon.apimgt.persistence.dto.UserContext; import org.wso2.carbon.apimgt.persistence.exceptions.APIPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.AsyncSpecPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.DocumentationPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.GraphQLPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.MediationPolicyPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.OASPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.PersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.ThumbnailPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.WSDLPersistenceException; import org.wso2.carbon.apimgt.persistence.mapper.APIMapper; import org.wso2.carbon.apimgt.persistence.mapper.APIProductMapper; import org.wso2.carbon.apimgt.persistence.mapper.DocumentMapper; import org.wso2.carbon.context.CarbonContext; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.databridge.commons.Event; import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact; import org.wso2.carbon.governance.api.exception.GovernanceException; import org.wso2.carbon.governance.api.generic.GenericArtifactManager; import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact; import org.wso2.carbon.governance.api.util.GovernanceUtils; import org.wso2.carbon.governance.custom.lifecycles.checklist.beans.LifecycleBean; import org.wso2.carbon.governance.custom.lifecycles.checklist.util.CheckListItem; import org.wso2.carbon.governance.custom.lifecycles.checklist.util.LifecycleBeanPopulator; import 
org.wso2.carbon.governance.custom.lifecycles.checklist.util.Property; import org.wso2.carbon.governance.lcm.util.CommonUtil; import org.wso2.carbon.registry.core.ActionConstants; import org.wso2.carbon.registry.core.Association; import org.wso2.carbon.registry.core.Registry; import org.wso2.carbon.registry.core.RegistryConstants; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.config.RegistryContext; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.jdbc.realm.RegistryAuthorizationManager; import org.wso2.carbon.registry.core.pagination.PaginationContext; import org.wso2.carbon.registry.core.service.RegistryService; import org.wso2.carbon.registry.core.session.UserRegistry; import org.wso2.carbon.registry.core.utils.RegistryUtils; import org.wso2.carbon.user.api.AuthorizationManager; import org.wso2.carbon.user.api.UserStoreException; import org.wso2.carbon.user.api.UserStoreManager; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.SortedSet; import java.util.StringTokenizer; import java.util.TreeSet; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import javax.cache.Cache; import javax.cache.Caching; import javax.xml.namespace.QName; import 
javax.xml.stream.XMLStreamException; /** * This class provides the core API provider functionality. It is implemented in a very * self-contained and 'pure' manner, without taking requirements like security into account, * which are subject to frequent change. Due to this 'pure' nature and the significance of * the class to the overall API management functionality, the visibility of the class has * been reduced to package level. This means we can still use it for internal purposes and * possibly even extend it, but it's totally off the limits of the users. Users wishing to * pragmatically access this functionality should use one of the extensions of this * class which is visible to them. These extensions may add additional features like * security to this class. */ class APIProviderImpl extends AbstractAPIManager implements APIProvider { private static final Log log = LogFactory.getLog(APIProviderImpl.class); private ServiceCatalogDAO serviceCatalogDAO = ServiceCatalogDAO.getInstance(); private final String userNameWithoutChange; private CertificateManager certificateManager; protected ArtifactSaver artifactSaver; protected ImportExportAPI importExportAPI; protected GatewayArtifactsMgtDAO gatewayArtifactsMgtDAO; private RecommendationEnvironment recommendationEnvironment; public APIProviderImpl(String username) throws APIManagementException { super(username); this.userNameWithoutChange = username; certificateManager = CertificateManagerImpl.getInstance(); this.artifactSaver = ServiceReferenceHolder.getInstance().getArtifactSaver(); this.importExportAPI = ServiceReferenceHolder.getInstance().getImportExportService(); this.gatewayArtifactsMgtDAO = GatewayArtifactsMgtDAO.getInstance(); this.recommendationEnvironment = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService() .getAPIManagerConfiguration().getApiRecommendationEnvironment(); } protected String getUserNameWithoutChange() { return userNameWithoutChange; } /** * Returns a list of all #{@link 
org.wso2.carbon.apimgt.api.model.Provider} available on the system. * * @return Set<Provider> * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get Providers */ @Override public Set<Provider> getAllProviders() throws APIManagementException { Set<Provider> providerSet = new HashSet<Provider>(); GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.PROVIDER_KEY); try { if (artifactManager == null) { String errorMessage = "Failed to retrieve artifact manager when fetching providers."; log.error(errorMessage); throw new APIManagementException(errorMessage); } GenericArtifact[] genericArtifact = artifactManager.getAllGenericArtifacts(); if (genericArtifact == null || genericArtifact.length == 0) { return providerSet; } for (GenericArtifact artifact : genericArtifact) { Provider provider = new Provider(artifact.getAttribute(APIConstants.PROVIDER_OVERVIEW_NAME)); provider.setDescription(APIConstants.PROVIDER_OVERVIEW_DESCRIPTION); provider.setEmail(APIConstants.PROVIDER_OVERVIEW_EMAIL); providerSet.add(provider); } } catch (GovernanceException e) { handleException("Failed to get all providers", e); } return providerSet; } /** * Get a list of APIs published by the given provider. If a given API has multiple APIs, * only the latest version will * be included in this list. 
* * @param providerId , provider id * @return set of API * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get set of API */ @Override public List<API> getAPIsByProvider(String providerId) throws APIManagementException { List<API> apiSortedList = new ArrayList<API>(); try { providerId = APIUtil.replaceEmailDomain(providerId); String providerPath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + providerId; GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); Association[] associations = registry.getAssociations(providerPath, APIConstants.PROVIDER_ASSOCIATION); for (Association association : associations) { String apiPath = association.getDestinationPath(); if (registry.resourceExists(apiPath)) { Resource resource = registry.get(apiPath); String apiArtifactId = resource.getUUID(); if (apiArtifactId != null) { GenericArtifact apiArtifact = artifactManager.getGenericArtifact(apiArtifactId); if (apiArtifact != null) { String type = apiArtifact.getAttribute(APIConstants.API_OVERVIEW_TYPE); if (!APIConstants.API_PRODUCT.equals(type)) { apiSortedList.add(getAPI(apiArtifact)); } } } else { throw new GovernanceException("artifact id is null of " + apiPath); } } } } catch (RegistryException e) { handleException("Failed to get APIs for provider : " + providerId, e); } Collections.sort(apiSortedList, new APINameComparator()); return apiSortedList; } /** * Get a list of all the consumers for all APIs * * @param providerId if of the provider * @return Set<Subscriber> * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get subscribed APIs of given provider */ @Override @Deprecated public Set<Subscriber> getSubscribersOfProvider(String providerId) throws APIManagementException { Set<Subscriber> subscriberSet = null; try { subscriberSet = apiMgtDAO.getSubscribersOfProvider(providerId); } catch (APIManagementException e) { handleException("Failed to get Subscribers for : " 
+ providerId, e); } return subscriberSet; } /** * get details of provider * * @param providerName name of the provider * @return Provider * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get Provider */ @Override public Provider getProvider(String providerName) throws APIManagementException { Provider provider = null; String providerPath = APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + APIConstants.PROVIDERS_PATH + RegistryConstants.PATH_SEPARATOR + providerName; try { GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.PROVIDER_KEY); if (artifactManager == null) { String errorMessage = "Failed to retrieve artifact manager when getting provider " + providerName; log.error(errorMessage); throw new APIManagementException(errorMessage); } Resource providerResource = registry.get(providerPath); String artifactId = providerResource.getUUID(); if (artifactId == null) { throw new APIManagementException("artifact it is null"); } GenericArtifact providerArtifact = artifactManager.getGenericArtifact(artifactId); provider = APIUtil.getProvider(providerArtifact); } catch (RegistryException e) { handleException("Failed to get Provider form : " + providerName, e); } return provider; } /** * Return Usage of given APIIdentifier * * @param apiIdentifier APIIdentifier * @return Usage */ @Override public Usage getUsageByAPI(APIIdentifier apiIdentifier) { return null; } /** * Return Usage of given provider and API * * @param providerId if of the provider * @param apiName name of the API * @return Usage */ @Override public Usage getAPIUsageByUsers(String providerId, String apiName) { return null; } /** * Returns usage details of all APIs published by a provider * * @param providerName Provider Id * @return UserApplicationAPIUsages for given provider * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to get UserApplicationAPIUsage */ @Override 
public UserApplicationAPIUsage[] getAllAPIUsageByProvider(String providerName) throws APIManagementException {
    // Thin delegation to the DAO layer.
    return apiMgtDAO.getAllAPIUsageByProvider(providerName);
}

/**
 * Returns usage details of a particular API
 *
 * @param uuid         API uuid
 * @param organization identifier of the organization
 * @return UserApplicationAPIUsages for given provider
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to get UserApplicationAPIUsage
 */
@Override
public List<SubscribedAPI> getAPIUsageByAPIId(String uuid, String organization) throws APIManagementException {

    APIIdentifier identifier = apiMgtDAO.getAPIIdentifierFromUUID(uuid);
    List<SubscribedAPI> subscribedAPIs = new ArrayList<>();
    if (identifier != null) {
        // Compare identifiers with the email domain normalized on both sides, so that
        // provider names stored with "-AT-" match names given with "@".
        APIIdentifier apiIdEmailReplaced = new APIIdentifier(
                APIUtil.replaceEmailDomain(identifier.getProviderName()),
                identifier.getApiName(), identifier.getVersion());
        UserApplicationAPIUsage[] allApiResult = apiMgtDAO.getAllAPIUsageByProviderAndApiId(uuid, organization);
        for (UserApplicationAPIUsage usage : allApiResult) {
            for (SubscribedAPI apiSubscription : usage.getApiSubscriptions()) {
                APIIdentifier subsApiId = apiSubscription.getApiId();
                APIIdentifier subsApiIdEmailReplaced = new APIIdentifier(
                        APIUtil.replaceEmailDomain(subsApiId.getProviderName()),
                        subsApiId.getApiName(), subsApiId.getVersion());
                // Keep only subscriptions whose identifier matches the requested API.
                if (subsApiIdEmailReplaced.equals(apiIdEmailReplaced)) {
                    subscribedAPIs.add(apiSubscription);
                }
            }
        }
    }
    return subscribedAPIs;
}

/**
 * Returns usage details of a particular API
 *
 * @param apiProductId API Product identifier
 * @return UserApplicationAPIUsages for given provider
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to get UserApplicationAPIUsage
 */
@Override
public List<SubscribedAPI> getAPIProductUsageByAPIProductId(APIProductIdentifier apiProductId)
        throws APIManagementException {
    // Same email-domain normalization as getAPIUsageByAPIId, applied to product identifiers.
    APIProductIdentifier apiIdEmailReplaced =
            new APIProductIdentifier(APIUtil.replaceEmailDomain(apiProductId.getProviderName()),
            apiProductId.getName(), apiProductId.getVersion());
    UserApplicationAPIUsage[] allApiProductResult =
            apiMgtDAO.getAllAPIProductUsageByProvider(apiProductId.getProviderName());
    List<SubscribedAPI> subscribedAPIs = new ArrayList<>();
    for (UserApplicationAPIUsage usage : allApiProductResult) {
        for (SubscribedAPI apiSubscription : usage.getApiSubscriptions()) {
            APIProductIdentifier subsApiProductId = apiSubscription.getProductId();
            APIProductIdentifier subsApiProductIdEmailReplaced = new APIProductIdentifier(
                    APIUtil.replaceEmailDomain(subsApiProductId.getProviderName()),
                    subsApiProductId.getName(), subsApiProductId.getVersion());
            if (subsApiProductIdEmailReplaced.equals(apiIdEmailReplaced)) {
                subscribedAPIs.add(apiSubscription);
            }
        }
    }
    return subscribedAPIs;
}

/**
 * Shows how a given consumer uses the given API.
 *
 * @param apiIdentifier APIIdentifier
 * @param consumerEmail E-mal Address of consumer
 * @return Usage
 */
@Override
public Usage getAPIUsageBySubscriber(APIIdentifier apiIdentifier, String consumerEmail) {
    // Not implemented in this manager; usage data is served elsewhere.
    return null;
}

/**
 * Returns full list of Subscribers of an API
 *
 * @param identifier APIIdentifier
 * @return Set<Subscriber>
 * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get Subscribers
 */
@Override
public Set<Subscriber> getSubscribersOfAPI(APIIdentifier identifier) throws APIManagementException {

    Set<Subscriber> subscriberSet = null;
    try {
        subscriberSet = apiMgtDAO.getSubscribersOfAPI(identifier);
    } catch (APIManagementException e) {
        handleException("Failed to get subscribers for API : " + identifier.getApiName(), e);
    }
    return subscriberSet;
}

/**
 * Returns full list of subscriptions of an API
 *
 * @param apiName    Name of the API
 * @param apiVersion Version of the API
 * @param provider   Name of API creator
 * @return All subscriptions of a given API
 * @throws APIManagementException if failed to get Subscribers
 */
public List<SubscribedAPI> getSubscriptionsOfAPI(String apiName, String apiVersion, String provider) throws
        APIManagementException {
    return apiMgtDAO.getSubscriptionsOfAPI(apiName, apiVersion, provider);
}

/**
 * this method returns the Set<APISubscriptionCount> for given provider and api
 *
 * @param identifier APIIdentifier
 * @return Set<APISubscriptionCount>
 * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get APISubscriptionCountByAPI
 */
@Override
public long getAPISubscriptionCountByAPI(APIIdentifier identifier) throws APIManagementException {

    long count = 0L;
    try {
        count = apiMgtDAO.getAPISubscriptionCountByAPI(identifier);
    } catch (APIManagementException e) {
        handleException("Failed to get APISubscriptionCount for: " + identifier.getApiName(), e);
    }
    return count;
}

// Builds the throttle policy XML for a tier by filling APIConstants.THROTTLE_POLICY_TEMPLATE
// with the tier's name, request count, unit time, and a block of attribute elements.
// Returns null only if parsing fails AND handleException does not throw.
private OMElement createThrottlePolicy(Tier tier) throws APIManagementException {
    OMElement throttlePolicy = null;
    String policy = APIConstants.THROTTLE_POLICY_TEMPLATE;

    StringBuilder attributeBuilder = new StringBuilder();
    Map<String, Object> tierAttributes = tier.getTierAttributes();

    if (tierAttributes != null) {
        for (Map.Entry<String, Object> entry : tierAttributes.entrySet()) {
            // Only String-valued attributes are serialized into the policy.
            if (entry.getValue() instanceof String) {
                String attributeName = entry.getKey().trim();
                String attributeValue = ((String) entry.getValue()).trim();

                // We see whether the attribute name is empty.
                if (!attributeName.isEmpty()) {
                    attributeBuilder.append(String.format(APIConstants.THROTTLE_POLICY_ATTRIBUTE_TEMPLATE,
                            attributeName, attributeValue, attributeName));
                }
            } else {
                // Non-String values are skipped; logged only at debug level.
                if (log.isDebugEnabled()) {
                    log.debug("Unrecognized throttle attribute value : " + entry.getValue() +
                            " of attribute name : " + entry.getKey());
                }
            }
        }
    }

    // We add the "description", "billing plan" and "stop on quota reach" as custom attributes
    // NOTE(review): tier.getDescription()/getTierPlan() are dereferenced with .trim() without a
    // null check — presumably callers always populate them; confirm before relying on this.
    attributeBuilder.append(String.format(APIConstants.THROTTLE_POLICY_ATTRIBUTE_TEMPLATE,
            APIConstants.THROTTLE_TIER_DESCRIPTION_ATTRIBUTE, tier.getDescription().trim(),
            APIConstants.THROTTLE_TIER_DESCRIPTION_ATTRIBUTE));

    attributeBuilder.append(String.format(APIConstants.THROTTLE_POLICY_ATTRIBUTE_TEMPLATE,
            APIConstants.THROTTLE_TIER_PLAN_ATTRIBUTE, tier.getTierPlan().trim(),
            APIConstants.THROTTLE_TIER_PLAN_ATTRIBUTE));

    attributeBuilder.append(String.format(APIConstants.THROTTLE_POLICY_ATTRIBUTE_TEMPLATE,
            APIConstants.THROTTLE_TIER_QUOTA_ACTION_ATTRIBUTE,
            String.valueOf(tier.isStopOnQuotaReached()),
            APIConstants.THROTTLE_TIER_QUOTA_ACTION_ATTRIBUTE));

    // Note: We assume that the unit time is in milliseconds.
    policy = String.format(policy, tier.getName(), tier.getRequestCount(), tier.getUnitTime(),
            attributeBuilder.toString());
    try {
        throttlePolicy = AXIOMUtil.stringToOM(policy);
    } catch (XMLStreamException e) {
        handleException("Invalid policy xml generated", e);
    }
    return throttlePolicy;
}

/**
 * Adds a new API to the Store
 *
 * @param api API
 * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to add API
 */
public API addAPI(API api) throws APIManagementException {
    // Validate metadata, throttling tiers, key managers, endpoint security,
    // transports and API security before persisting anything.
    validateApiInfo(api);
    String tenantDomain = MultitenantUtils
            .getTenantDomain(APIUtil.replaceEmailDomainBack(api.getId().getProviderName()));
    validateResourceThrottlingTiers(api, tenantDomain);
    validateKeyManagers(api);
    if (api.isEndpointSecured() && StringUtils.isEmpty(api.getEndpointUTPassword())) {
        String errorMessage = "Empty password is given for endpointSecurity when creating API "
                + api.getId().getApiName();
        throw new APIManagementException(errorMessage);
    }

    //Validate Transports
    validateAndSetTransports(api);
    validateAndSetAPISecurity(api);
    RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
    //Add default API LC if it is not there
    try {
        if (!CommonUtil.lifeCycleExists(APIConstants.API_LIFE_CYCLE,
                registryService.getConfigSystemRegistry(tenantId))) {
            String defaultLifecyclePath = CommonUtil.getDefaltLifecycleConfigLocation() + File.separator
                    + APIConstants.API_LIFE_CYCLE + APIConstants.XML_EXTENSION;
            File file = new File(defaultLifecyclePath);
            String content = null;
            if (file != null && file.exists()) {
                content = FileUtils.readFileToString(file);
            }
            if (content != null) {
                CommonUtil.addLifecycle(content, registryService.getConfigSystemRegistry(tenantId),
                        CommonUtil.getRootSystemRegistry(tenantId));
            }
        }
    } catch (RegistryException e) {
        handleException("Error occurred while adding default APILifeCycle.", e);
    } catch (IOException e) {
        handleException("Error occurred while loading APILifeCycle.xml.", e);
    } catch (XMLStreamException e) {
        handleException("Error occurred while adding default API LifeCycle.", e);
    }

    // Persist the API artifact first; the persistence layer assigns the UUID and created time.
    try {
        PublisherAPI addedAPI = apiPersistenceInstance.addAPI(new Organization(api.getOrganization()),
                APIMapper.INSTANCE.toPublisherApi(api));
        api.setUuid(addedAPI.getId());
        api.setCreatedTime(addedAPI.getCreatedTime());
    } catch (APIPersistenceException e) {
        throw new APIManagementException("Error while persisting API ", e);
    }

    if (log.isDebugEnabled()) {
        log.debug("API details successfully added to the registry. API Name: " + api.getId().getApiName()
                + ", API Version : " + api.getId().getVersion() + ", API context : " + api.getContext());
    }
    // Then store metadata, scopes and URI templates in the AM database.
    int tenantId = APIUtil.getInternalOrganizationId(api.getOrganization());
    addAPI(api, tenantId);

    JSONObject apiLogObject = new JSONObject();
    apiLogObject.put(APIConstants.AuditLogConstants.NAME, api.getId().getApiName());
    apiLogObject.put(APIConstants.AuditLogConstants.CONTEXT, api.getContext());
    apiLogObject.put(APIConstants.AuditLogConstants.VERSION, api.getId().getVersion());
    apiLogObject.put(APIConstants.AuditLogConstants.PROVIDER, api.getId().getProviderName());

    APIUtil.logAuditMessage(APIConstants.AuditLogConstants.API, apiLogObject.toString(),
            APIConstants.AuditLogConstants.CREATED, this.username);

    if (log.isDebugEnabled()) {
        log.debug("API details successfully added to the API Manager Database. API Name: " + api.getId()
                .getApiName() + ", API Version : " + api.getId().getVersion() + ", API context : " + api
                .getContext());
    }

    // Warm the context cache so later context-uniqueness checks see this API.
    if (APIUtil.isAPIManagementEnabled()) {
        Cache contextCache = APIUtil.getAPIContextCache();
        Boolean apiContext = null;

        Object cachedObject = contextCache.get(api.getContext());
        if (cachedObject != null) {
            apiContext = Boolean.valueOf(cachedObject.toString());
        }
        if (apiContext == null) {
            contextCache.put(api.getContext(), Boolean.TRUE);
        }
    }

    // The literal string "null" can arrive from the REST layer; normalize it to a real null.
    if ("null".equals(api.getAccessControlRoles())) {
        api.setAccessControlRoles(null);
    }
    //notify key manager with API addition
    registerOrUpdateResourceInKeyManager(api, tenantDomain);
    return api;

}

/**
 * Add API metadata, local scopes and URI templates to the database and KeyManager.
 *
 * @param api      API to add
 * @param tenantId Tenant Id
 * @throws APIManagementException if an error occurs while adding the API
 */
private void addAPI(API api, int tenantId) throws APIManagementException {

    int apiId = apiMgtDAO.addAPI(api, tenantId, api.getOrganization());
    addLocalScopes(api.getId().getApiName(), api.getUriTemplates(), api.getOrganization());
    addURITemplates(apiId, api, tenantId);
    String tenantDomain = MultitenantUtils
            .getTenantDomain(APIUtil.replaceEmailDomainBack(api.getId().getProviderName()));
    // Publish an API_CREATE event so other components (gateways, notifiers) pick up the new API.
    APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(),
            APIConstants.EventType.API_CREATE.name(), tenantId, tenantDomain, api.getId().getApiName(), apiId,
            api.getUuid(), api.getId().getVersion(), api.getType(), api.getContext(),
            APIUtil.replaceEmailDomainBack(api.getId().getProviderName()), api.getStatus());
    APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name());
}

/**
 * Add local scopes for the API if the scopes does not exist as shared scopes. The local scopes to add will be
 * take from the URI templates.
 *
 * @param apiName      API name
 * @param uriTemplates URI Templates
 * @param organization Organization
 * @throws APIManagementException if fails to add local scopes for the API
 */
private void addLocalScopes(String apiName, Set<URITemplate> uriTemplates, String organization)
        throws APIManagementException {

    int tenantId = APIUtil.getInternalOrganizationId(organization);
    String tenantDomain = APIUtil.getTenantDomainFromTenantId(tenantId);
    Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain);
    //Get the local scopes set to register for the API from URI templates
    Set<Scope> scopesToRegister = getScopesToRegisterFromURITemplates(apiName, organization, uriTemplates);
    //Register scopes
    for (Scope scope : scopesToRegister) {
        for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) {
            KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager();
            if (keyManager != null) {
                String scopeKey = scope.getKey();
                try {
                    // Check if key already registered in KM. Scope Key may be already registered for a different
                    // version.
                    if (!keyManager.isScopeExists(scopeKey)) {
                        //register scope in KM
                        keyManager.registerScope(scope);
                    } else {
                        if (log.isDebugEnabled()) {
                            log.debug("Scope: " + scopeKey +
                                    " already registered in KM. Skipping registering scope.");
                        }
                    }
                } catch (APIManagementException e) {
                    // Best-effort: a failure against one key manager is logged but does not
                    // abort registration against the remaining key managers.
                    log.error("Error while registering Scope " + scopeKey + "in Key Manager "
                            + keyManagerDtoEntry.getKey(), e);
                }
            }
        }
    }
    // Finally persist the local scopes in the AM database.
    addScopes(scopesToRegister, tenantId);
}

/**
 * Extract the scopes set from URI templates which needs to be registered as local scopes for the API.
 *
 * @param apiName      API name
 * @param organization Organization
 * @param uriTemplates URI templates
 * @return Local Scopes set to register
 * @throws APIManagementException if fails to extract Scopes from URI templates
 */
private Set<Scope> getScopesToRegisterFromURITemplates(String apiName, String organization,
                                                       Set<URITemplate> uriTemplates)
        throws APIManagementException {

    int tenantId = APIUtil.getInternalOrganizationId(organization);
    Set<Scope> scopesToRegister = new HashSet<>();
    Set<Scope> uriTemplateScopes = new HashSet<>();
    //Get the attached scopes set from the URI templates
    for (URITemplate uriTemplate : uriTemplates) {
        List<Scope> scopesFromURITemplate = uriTemplate.retrieveAllScopes();
        for (Scope scopeFromURITemplate : scopesFromURITemplate) {
            if (scopeFromURITemplate == null) {
                continue; // No scopes attached for the URI Template
            }
            uriTemplateScopes.add(scopeFromURITemplate);
        }
    }

    //Validate and extract only the local scopes which need to be registered in KM
    for (Scope scope : uriTemplateScopes) {
        String scopeKey = scope.getKey();
        //Check if it an existing shared scope, if so skip adding scope
        if (!isSharedScopeNameExists(scopeKey, tenantId)) {
            // Check if scope key is already assigned locally to a different API (Other than different versions of
            // the same API).
            if (!isScopeKeyAssignedLocally(apiName, scope.getKey(), organization)) {
                scopesToRegister.add(scope);
            } else {
                throw new APIManagementException("Error while adding local scopes for API " + apiName
                        + ". Scope: " + scopeKey + " already assigned locally for a different API.");
            }
        } else if (log.isDebugEnabled()) {
            log.debug("Scope " + scopeKey + " exists as a shared scope. Skip adding as a local scope.");
        }
    }
    return scopesToRegister;
}

/**
 * Add URI templates for the API.
 *
 * @param apiId    API Id
 * @param api      API
 * @param tenantId Tenant Id
 * @throws APIManagementException if fails to add URI templates for the API
 */
private void addURITemplates(int apiId, API api, int tenantId) throws APIManagementException {

    String tenantDomain = APIUtil.getTenantDomainFromTenantId(tenantId);
    apiMgtDAO.addURITemplates(apiId, api, tenantId);
    Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain);
    for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) {
        KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager();
        if (keyManager != null) {
            try {
                keyManager.attachResourceScopes(api, api.getUriTemplates());
            } catch (APIManagementException e) {
                // Best-effort per key manager: log and continue with the rest.
                log.error("Error while Attaching Resource to scope in Key Manager "
                        + keyManagerDtoEntry.getKey(), e);
            }
        }
    }
}

/**
 * Notify the key manager with API update or addition
 *
 * @param api          API
 * @param tenantDomain tenant domain of the API
 * @throws APIManagementException when error occurs when register/update API at Key Manager side
 */
private void registerOrUpdateResourceInKeyManager(API api, String tenantDomain) throws APIManagementException {

    //get new key manager instance for resource registration.
    Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain);
    for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) {
        KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager();
        if (keyManager != null) {
            try {
                // NOTE(review): raw Map — the key manager API returns an untyped resource map.
                Map registeredResource = keyManager.getResourceByApiId(api.getId().toString());
                if (registeredResource == null) {
                    boolean isNewResourceRegistered = keyManager.registerNewResource(api, null);
                    if (!isNewResourceRegistered) {
                        log.warn("APIResource registration is failed while adding the API- "
                                + api.getId().getApiName() + "-" + api.getId().getVersion()
                                + " into Key Manager : " + keyManagerDtoEntry.getKey());
                    }
                } else {
                    //update APIResource.
                    String resourceId = (String) registeredResource.get("resourceId");
                    if (resourceId == null) {
                        handleException("APIResource update is failed because of empty resourceID.");
                    }
                    keyManager.updateRegisteredResource(api, registeredResource);
                }
            } catch (APIManagementException e) {
                // Best-effort per key manager: log and continue with the rest.
                log.error("API Resource Registration failed in Key Manager " + keyManagerDtoEntry.getKey(), e);
            }
        }
    }
}

/**
 * Validates the name and version of api against illegal characters.
 *
 * @param api API info object
 * @throws APIManagementException if the name/version is missing, contains illegal characters,
 *                                or a field exceeds its maximum allowed length
 */
private void validateApiInfo(API api) throws APIManagementException {

    String apiName = api.getId().getApiName();
    String apiVersion = api.getId().getVersion();
    if (apiName == null) {
        handleException("API Name is required.");
    } else if (containsIllegals(apiName)) {
        handleException("API Name contains one or more illegal characters  " +
                "( " + APIConstants.REGEX_ILLEGAL_CHARACTERS_FOR_API_METADATA + " )");
    }
    if (apiVersion == null) {
        handleException("API Version is required.");
    } else if (containsIllegals(apiVersion)) {
        handleException("API Version contains one or more illegal characters  " +
                "( " + APIConstants.REGEX_ILLEGAL_CHARACTERS_FOR_API_METADATA + " )");
    }
    if (!hasValidLength(apiName, APIConstants.MAX_LENGTH_API_NAME)
            || !hasValidLength(apiVersion, APIConstants.MAX_LENGTH_VERSION)
            || !hasValidLength(api.getId().getProviderName(), APIConstants.MAX_LENGTH_PROVIDER)
            || !hasValidLength(api.getContext(), APIConstants.MAX_LENGTH_CONTEXT)) {
        throw new APIManagementException("Character length exceeds the allowable limit",
                ExceptionCodes.LENGTH_EXCEEDS);
    }
}

// Deletes the subscription block condition matching the given condition value, if one exists.
public void deleteSubscriptionBlockCondition(String conditionValue)
        throws APIManagementException {

    BlockConditionsDTO blockCondition = apiMgtDAO.getSubscriptionBlockCondition(conditionValue, tenantDomain);
    if (blockCondition != null) {
        deleteBlockConditionByUUID(blockCondition.getUUID());
    }
}

/**
 * This method is used to get the context of API identified by the given uuid
 *
 * @param uuid api uuid
 * @return
apiContext
 * @throws APIManagementException if failed to fetch the context for api uuid
 */
public String getAPIContext(String uuid) throws APIManagementException {
    // Thin delegation to the DAO layer.
    return apiMgtDAO.getAPIContext(uuid);
}

/**
 * Check whether a string contains illegal characters
 *
 * @param toExamine string to examine for illegal characters
 * @return true if found illegal characters, else false
 */
public boolean containsIllegals(String toExamine) {

    Pattern pattern = Pattern.compile(APIConstants.REGEX_ILLEGAL_CHARACTERS_FOR_API_METADATA);
    Matcher matcher = pattern.matcher(toExamine);
    return matcher.find();
}

/**
 * Check whether the provided information exceeds the maximum length
 *
 * @param field     text field to validate
 * @param maxLength maximum allowed length
 * @return true if the length is valid
 */
public boolean hasValidLength(String field, int maxLength) {
    return field.length() <= maxLength;
}

/**
 * Persist API Status into a property of API Registry resource
 *
 * @param artifactId API artifact ID
 * @param apiStatus  Current status of the API
 * @throws APIManagementException on error
 */
private void saveAPIStatus(String artifactId, String apiStatus) throws APIManagementException {

    try {
        Resource resource = registry.get(artifactId);
        if (resource != null) {
            String propValue = resource.getProperty(APIConstants.API_STATUS);
            if (propValue == null) {
                resource.addProperty(APIConstants.API_STATUS, apiStatus);
            } else {
                resource.setProperty(APIConstants.API_STATUS, apiStatus);
            }
            registry.put(artifactId, resource);
        }
    } catch (RegistryException e) {
        // NOTE(review): message is misleading — this runs while saving the API status,
        // not while adding the API.
        handleException("Error while adding API", e);
    }
}

@Override
public String getDefaultVersion(APIIdentifier apiid) throws APIManagementException {

    String defaultVersion = null;
    try {
        defaultVersion = apiMgtDAO.getDefaultVersion(apiid);
    } catch (APIManagementException e) {
        handleException("Error while getting default version :" + apiid.getApiName(), e);
    }
    return defaultVersion;
}

// Returns the published default version of the given API, or null when none exists.
public String getPublishedDefaultVersion(APIIdentifier apiid) throws APIManagementException {

    String defaultVersion = null;
    try {
        defaultVersion = apiMgtDAO.getPublishedDefaultVersion(apiid);
    } catch (APIManagementException e) {
        handleException("Error while getting published default version :" + apiid.getApiName(), e);
    }
    return defaultVersion;
}

/**
 * This method is used to save the wsdl file in the registry
 * This is used when user starts api creation with a soap endpoint
 *
 * @param api api object
 * @throws APIManagementException if saving the WSDL fails
 */
public void updateWsdlFromUrl(API api) throws APIManagementException {

    boolean transactionCommitted = false;
    try {
        registry.beginTransaction();
        String apiArtifactId = registry.get(APIUtil.getAPIPath(api.getId())).getUUID();
        GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
        if (artifactManager == null) {
            String errorMessage = "Artifact manager is null when updating WSDL of API " +
                    api.getId().getApiName();
            log.error(errorMessage);
            throw new APIManagementException(errorMessage);
        }
        GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
        GenericArtifact apiArtifact = APIUtil.createAPIArtifactContent(artifact, api);
        String artifactPath = GovernanceUtils.getArtifactPath(registry, apiArtifact.getId());
        if (APIUtil.isValidWSDLURL(api.getWsdlUrl(), false)) {
            String path = APIUtil.createWSDL(registry, api);
            updateWSDLUriInAPIArtifact(path, artifactManager, apiArtifact, artifactPath);
        }
        registry.commitTransaction();
        transactionCommitted = true;
    } catch (RegistryException e) {
        try {
            registry.rollbackTransaction();
        } catch (RegistryException ex) {
            handleException("Error occurred while rolling back the transaction.", ex);
        }
        throw new APIManagementException("Error occurred while saving the wsdl in the registry.", e);
    } finally {
        try {
            // Safety net: roll back if the commit was never reached.
            if (!transactionCommitted) {
                registry.rollbackTransaction();
            }
        } catch (RegistryException ex) {
            handleException("Error occurred while rolling back the transaction.", ex);
        }
    }
}

public
void updateWsdlFromResourceFile(API api) throws APIManagementException {
    // Saves an uploaded WSDL resource file for the API inside a registry transaction.
    boolean transactionCommitted = false;
    try {
        registry.beginTransaction();
        String apiArtifactId = registry.get(APIUtil.getAPIPath(api.getId())).getUUID();
        GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
        if (artifactManager == null) {
            String errorMessage = "Artifact manager is null when updating WSDL of API " + api.getId().getApiName();
            log.error(errorMessage);
            throw new APIManagementException(errorMessage);
        }
        GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
        GenericArtifact apiArtifact = APIUtil.createAPIArtifactContent(artifact, api);
        String artifactPath = GovernanceUtils.getArtifactPath(registry, apiArtifact.getId());
        // NOTE(review): commit happens only inside this branch; when getWsdlResource() is null the
        // transaction is silently rolled back by the finally block — confirm that is intended.
        if (api.getWsdlResource() != null) {
            String path = APIUtil.saveWSDLResource(registry, api);
            apiArtifact.setAttribute(APIConstants.API_OVERVIEW_WSDL, api.getWsdlUrl()); //reset the wsdl path
            artifactManager.updateGenericArtifact(apiArtifact); //update the artifact
            registry.commitTransaction();
            transactionCommitted = true;
        }
    } catch (RegistryException e) {
        // NOTE(review): unlike updateWsdlFromUrl, the RegistryException is not rethrown here, so a
        // failed save is silently swallowed after rollback — confirm whether this should rethrow.
        try {
            registry.rollbackTransaction();
        } catch (RegistryException ex) {
            handleException("Error occurred while rolling back the transaction.", ex);
        }
    } finally {
        try {
            if (!transactionCommitted) {
                registry.rollbackTransaction();
            }
        } catch (RegistryException ex) {
            handleException("Error occurred while rolling back the transaction.", ex);
        }
    }
}

// Checks whether the current user has sufficient permission (create/publish, depending on the
// API lifecycle state) to update the given API.
public boolean isAPIUpdateValid(API api) throws APIManagementException {
    String apiSourcePath = APIUtil.getAPIPath(api.getId());
    boolean isValid = false;
    try {
        Resource apiSourceArtifact = registry.get(apiSourcePath);
        GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
        if (artifactManager == null) {
            String errorMessage = "Failed to retrieve artifact manager when checking validity of API update for " + api.getId()
                    .getApiName();
            log.error(errorMessage);
            throw new
APIManagementException(errorMessage);
        }
        GenericArtifact artifact = artifactManager.getGenericArtifact(apiSourceArtifact.getUUID());
        String status = APIUtil.getLcStateFromArtifact(artifact);
        if (!APIConstants.CREATED.equals(status) && !APIConstants.PROTOTYPED.equals(status)) {
            //api at least is in published status
            if (APIUtil.hasPermission(getUserNameWithoutChange(), APIConstants.Permissions.API_PUBLISH)) {
                //user has publish permission
                isValid = true;
            }
        } else if (APIConstants.CREATED.equals(status) || APIConstants.PROTOTYPED.equals(status)) {
            //api in create status
            if (APIUtil.hasPermission(getUserNameWithoutChange(), APIConstants.Permissions.API_CREATE)
                    || APIUtil.hasPermission(getUserNameWithoutChange(), APIConstants.Permissions.API_PUBLISH)) {
                //user has create or publish permission
                isValid = true;
            }
        }
    } catch (RegistryException ex) {
        handleException("Error while validate user for API publishing", ex);
    }
    return isValid;
}

/**
 * Updates an existing API
 *
 * @param api API
 * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to update API
 * @throws org.wso2.carbon.apimgt.api.FaultGatewaysException on Gateway Failure
 */
@Override
public void updateAPI(API api) throws APIManagementException, FaultGatewaysException {
    boolean isValid = isAPIUpdateValid(api);
    if (!isValid) {
        throw new APIManagementException(" User doesn't have permission for update");
    }
    API oldApi = getAPIbyUUID(api.getUuid(), api.getOrganization());
    String organization = api.getOrganization();
    if (!oldApi.getStatus().equals(api.getStatus())) {
        // We don't allow API status updates via this method.
        // Use changeAPIStatus for that kind of updates.
throw new APIManagementException("Invalid API update operation involving API status changes");
    }
    validateKeyManagers(api);
    Gson gson = new Gson();
    // Carry forward old monetization property values wherever the incoming update left them blank.
    Map<String, String> oldMonetizationProperties = gson.fromJson(oldApi.getMonetizationProperties().toString(),
            HashMap.class);
    if (oldMonetizationProperties != null && !oldMonetizationProperties.isEmpty()) {
        Map<String, String> newMonetizationProperties = gson.fromJson(api.getMonetizationProperties().toString(),
                HashMap.class);
        if (newMonetizationProperties != null) {
            for (Map.Entry<String, String> entry : oldMonetizationProperties.entrySet()) {
                String newValue = newMonetizationProperties.get(entry.getKey());
                if (StringUtils.isAllBlank(newValue)) {
                    newMonetizationProperties.put(entry.getKey(), entry.getValue());
                }
            }
            JSONParser parser = new JSONParser();
            try {
                JSONObject jsonObj = (JSONObject) parser.parse(gson.toJson(newMonetizationProperties));
                api.setMonetizationProperties(jsonObj);
            } catch (ParseException e) {
                throw new APIManagementException("Error when parsing monetization properties ", e);
            }
        }
    }
    // Remember the published default version before the update so we can notify it afterwards.
    String publishedDefaultVersion = getPublishedDefaultVersion(api.getId());
    //Update WSDL in the registry
    if (api.getWsdlUrl() != null && api.getWsdlResource() == null) {
        updateWsdlFromUrl(api);
    }
    if (api.getWsdlResource() != null) {
        updateWsdlFromResourceFile(api);
    }
    // Recompute registry permissions only when access control, visibility, or their role lists changed.
    boolean updatePermissions = false;
    if (APIUtil.isAccessControlEnabled()) {
        if (!oldApi.getAccessControl().equals(api.getAccessControl()) ||
                (APIConstants.API_RESTRICTED_VISIBILITY.equals(oldApi.getAccessControl()) &&
                        !api.getAccessControlRoles().equals(oldApi.getAccessControlRoles())) ||
                !oldApi.getVisibility().equals(api.getVisibility()) ||
                (APIConstants.API_RESTRICTED_VISIBILITY.equals(oldApi.getVisibility()) &&
                        !api.getVisibleRoles().equals(oldApi.getVisibleRoles()))) {
            updatePermissions = true;
        }
    } else if (!oldApi.getVisibility().equals(api.getVisibility()) ||
            (APIConstants.API_RESTRICTED_VISIBILITY.equals(oldApi.getVisibility()) &&
!api.getVisibleRoles().equals(oldApi.getVisibleRoles()))) {
        updatePermissions = true;
    }
    updateEndpointSecurity(oldApi, api);
    String apiUUid = updateApiArtifact(api, true, updatePermissions);
    api.setUuid(apiUUid);
    if (!oldApi.getContext().equals(api.getContext())) {
        api.setApiHeaderChanged(true);
    }
    int tenantId;
    String tenantDomain = MultitenantUtils
            .getTenantDomain(APIUtil.replaceEmailDomainBack(api.getId().getProviderName()));
    try {
        tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                .getTenantId(tenantDomain);
    } catch (UserStoreException e) {
        throw new APIManagementException(
                "Error in retrieving Tenant Information while updating api :" + api.getId().getApiName(), e);
    }
    validateResourceThrottlingTiers(api, tenantDomain);
    //get product resource mappings on API before updating the API. Update uri templates on api will remove all
    //product mappings as well.
    List<APIProductResource> productResources = apiMgtDAO.getProductMappingsForAPI(api);
    updateAPI(api, tenantId, userNameWithoutChange);
    // Restore the product mappings captured above, since the URI template update removed them.
    updateProductResourceMappings(api, organization, productResources);
    if (log.isDebugEnabled()) {
        log.debug("Successfully updated the API: " + api.getId() + " in the database");
    }
    JSONObject apiLogObject = new JSONObject();
    apiLogObject.put(APIConstants.AuditLogConstants.NAME, api.getId().getApiName());
    apiLogObject.put(APIConstants.AuditLogConstants.CONTEXT, api.getContext());
    apiLogObject.put(APIConstants.AuditLogConstants.VERSION, api.getId().getVersion());
    apiLogObject.put(APIConstants.AuditLogConstants.PROVIDER, api.getId().getProviderName());
    APIUtil.logAuditMessage(APIConstants.AuditLogConstants.API, apiLogObject.toString(),
            APIConstants.AuditLogConstants.UPDATED, this.username);
    //update doc visibility
    List<Documentation> docsList = getAllDocumentation(api.getId());
    if (docsList != null) {
        Iterator it = docsList.iterator();
        while (it.hasNext()) {
            Object docsObject = it.next();
            Documentation docs = (Documentation) docsObject;
updateDocVisibility(api, docs);
        }
    }
    //notify key manager with API update
    registerOrUpdateResourceInKeyManager(api, tenantDomain);
    int apiId = apiMgtDAO.getAPIID(api.getUuid());
    // If this API is the new published default version, notify the previous default version.
    if (publishedDefaultVersion != null) {
        if (api.isPublishedDefaultVersion() && !api.getId().getVersion().equals(publishedDefaultVersion)) {
            APIIdentifier previousDefaultVersionIdentifier = new APIIdentifier(api.getId().getProviderName(),
                    api.getId().getApiName(), publishedDefaultVersion);
            sendUpdateEventToPreviousDefaultVersion(previousDefaultVersionIdentifier, organization);
        }
    }
    // Broadcast the API_UPDATE event to registered notifiers.
    APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(),
            APIConstants.EventType.API_UPDATE.name(), tenantId, tenantDomain, api.getId().getApiName(), apiId,
            api.getUuid(), api.getId().getVersion(), api.getType(), api.getContext(),
            APIUtil.replaceEmailDomainBack(api.getId().getProviderName()), api.getStatus());
    APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name());
    // Extracting API details for the recommendation system
    if (recommendationEnvironment != null) {
        RecommenderEventPublisher extractor = new RecommenderDetailsExtractor(api, tenantDomain,
                APIConstants.ADD_API);
        Thread recommendationThread = new Thread(extractor);
        recommendationThread.start();
    }
}

// Publishes an API_UPDATE event for the API version that was previously the published default.
private void sendUpdateEventToPreviousDefaultVersion(APIIdentifier apiIdentifier, String organization)
        throws APIManagementException {
    API api = apiMgtDAO.getLightWeightAPIInfoByAPIIdentifier(apiIdentifier, organization);
    APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(),
            APIConstants.EventType.API_UPDATE.name(), tenantId, tenantDomain, apiIdentifier.getApiName(),
            api.getId().getId(), api.getUuid(), api.getId().getVersion(), api.getType(), api.getContext(),
            APIUtil.replaceEmailDomainBack(api.getId().getProviderName()), api.getStatus());
    APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name());
}

// Organization-aware variant of updateAPI: applies the update against existingAPI and persists
// through the persistence layer (signature continues below).
public API updateAPI(API api, API existingAPI) throws
APIManagementException {
    // Status transitions are not allowed through this method (use the lifecycle APIs instead).
    if (!existingAPI.getStatus().equals(api.getStatus())) {
        throw new APIManagementException("Invalid API update operation involving API status changes");
    }
    String tenantDomain = MultitenantUtils
            .getTenantDomain(APIUtil.replaceEmailDomainBack(api.getId().getProviderName()));
    //Validate Transports
    validateAndSetTransports(api);
    validateAndSetAPISecurity(api);
    validateKeyManagers(api);
    String publishedDefaultVersion = getPublishedDefaultVersion(api.getId());
    Gson gson = new Gson();
    String organization = api.getOrganization();
    // Carry forward old monetization property values wherever the incoming update left them blank.
    Map<String, String> oldMonetizationProperties =
            gson.fromJson(existingAPI.getMonetizationProperties().toString(), HashMap.class);
    if (oldMonetizationProperties != null && !oldMonetizationProperties.isEmpty()) {
        Map<String, String> newMonetizationProperties = gson.fromJson(api.getMonetizationProperties().toString(),
                HashMap.class);
        if (newMonetizationProperties != null) {
            for (Map.Entry<String, String> entry : oldMonetizationProperties.entrySet()) {
                String newValue = newMonetizationProperties.get(entry.getKey());
                if (StringUtils.isAllBlank(newValue)) {
                    newMonetizationProperties.put(entry.getKey(), entry.getValue());
                }
            }
            JSONParser parser = new JSONParser();
            try {
                JSONObject jsonObj = (JSONObject) parser.parse(gson.toJson(newMonetizationProperties));
                api.setMonetizationProperties(jsonObj);
            } catch (ParseException e) {
                throw new APIManagementException("Error when parsing monetization properties ", e);
            }
        }
    }
    updateEndpointSecurity(existingAPI, api);
    if (!existingAPI.getContext().equals(api.getContext())) {
        api.setApiHeaderChanged(true);
    }
    int tenantId = APIUtil.getInternalOrganizationId(organization);
    validateResourceThrottlingTiers(api, tenantDomain);
    //get product resource mappings on API before updating the API. Update uri templates on api will remove all
    //product mappings as well.
List<APIProductResource> productResources = apiMgtDAO.getProductMappingsForAPI(api);
    updateAPI(api, tenantId, userNameWithoutChange);
    // Restore the product mappings captured above, since the URI template update removed them.
    updateProductResourceMappings(api, organization, productResources);
    if (log.isDebugEnabled()) {
        log.debug("Successfully updated the API: " + api.getId() + " in the database");
    }
    JSONObject apiLogObject = new JSONObject();
    apiLogObject.put(APIConstants.AuditLogConstants.NAME, api.getId().getApiName());
    apiLogObject.put(APIConstants.AuditLogConstants.CONTEXT, api.getContext());
    apiLogObject.put(APIConstants.AuditLogConstants.VERSION, api.getId().getVersion());
    apiLogObject.put(APIConstants.AuditLogConstants.PROVIDER, api.getId().getProviderName());
    loadMediationPoliciesToAPI(api, tenantDomain);
    try {
        api.setCreatedTime(existingAPI.getCreatedTime());
        apiPersistenceInstance.updateAPI(new Organization(organization), APIMapper.INSTANCE.toPublisherApi(api));
    } catch (APIPersistenceException e) {
        throw new APIManagementException("Error while updating API details", e);
    }
    APIUtil.logAuditMessage(APIConstants.AuditLogConstants.API, apiLogObject.toString(),
            APIConstants.AuditLogConstants.UPDATED, this.username);
    //Validate Transports
    // NOTE(review): validateAndSetTransports/validateAndSetAPISecurity were already called at the top of
    // this method, and the persistence update below repeats the identical try block above — the second
    // round-trip looks redundant. TODO confirm and deduplicate.
    validateAndSetTransports(api);
    validateAndSetAPISecurity(api);
    try {
        api.setCreatedTime(existingAPI.getCreatedTime());
        apiPersistenceInstance.updateAPI(new Organization(organization), APIMapper.INSTANCE.toPublisherApi(api));
    } catch (APIPersistenceException e) {
        throw new APIManagementException("Error while updating API details", e);
    }
    //notify key manager with API update
    registerOrUpdateResourceInKeyManager(api, tenantDomain);
    int apiId = apiMgtDAO.getAPIID(api.getUuid());
    // If this API is the new published default version, notify the previous default version.
    if (publishedDefaultVersion != null) {
        if (api.isPublishedDefaultVersion() && !api.getId().getVersion().equals(publishedDefaultVersion)) {
            APIIdentifier previousDefaultVersionIdentifier = new APIIdentifier(api.getId().getProviderName(),
                    api.getId().getApiName(), publishedDefaultVersion);
sendUpdateEventToPreviousDefaultVersion(previousDefaultVersionIdentifier, organization);
        }
    }
    // Broadcast the API_UPDATE event to registered notifiers.
    APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(),
            APIConstants.EventType.API_UPDATE.name(), tenantId, tenantDomain, api.getId().getApiName(), apiId,
            api.getUuid(), api.getId().getVersion(), api.getType(), api.getContext(),
            APIUtil.replaceEmailDomainBack(api.getId().getProviderName()), api.getStatus());
    APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name());
    // Extracting API details for the recommendation system
    if (recommendationEnvironment != null) {
        RecommenderEventPublisher extractor = new RecommenderDetailsExtractor(api, tenantDomain,
                APIConstants.ADD_API);
        Thread recommendationThread = new Thread(extractor);
        recommendationThread.start();
    }
    return api;
}

// Re-applies registry permissions on the API's documentation resources when access control or
// visibility settings changed between oldApi and api.
private void updateDocumentPermissions(API api, API oldApi) throws APIManagementException {
    boolean updatePermissions = false;
    if (APIUtil.isAccessControlEnabled()) {
        if (!oldApi.getAccessControl().equals(api.getAccessControl()) ||
                (APIConstants.API_RESTRICTED_VISIBILITY.equals(oldApi.getAccessControl()) &&
                        !api.getAccessControlRoles().equals(oldApi.getAccessControlRoles())) ||
                !oldApi.getVisibility().equals(api.getVisibility()) ||
                (APIConstants.API_RESTRICTED_VISIBILITY.equals(oldApi.getVisibility()) &&
                        !api.getVisibleRoles().equals(oldApi.getVisibleRoles()))) {
            updatePermissions = true;
        }
    } else if (!oldApi.getVisibility().equals(api.getVisibility()) ||
            (APIConstants.API_RESTRICTED_VISIBILITY.equals(oldApi.getVisibility()) &&
                    !api.getVisibleRoles().equals(oldApi.getVisibleRoles()))) {
        updatePermissions = true;
    }
    String visibleRolesList = api.getVisibleRoles();
    String[] visibleRoles = new String[0];
    if (visibleRolesList != null) {
        visibleRoles = visibleRolesList.split(",");
    }
    //TODO check if registry.beginTransaction(); flow is needed
    List<Documentation> docs = getAllDocumentation(api.getId());
    if (updatePermissions) {
        APIManagerConfiguration config =
getAPIManagerConfiguration();
        // Doc-level permission mode: each API-visibility doc (and its content/file resources) gets
        // its permissions reset individually; otherwise the whole doc root is reset at once.
        boolean isSetDocLevelPermissions = Boolean.parseBoolean(
                config.getFirstProperty(APIConstants.API_PUBLISHER_ENABLE_API_DOC_VISIBILITY_LEVELS));
        String docRootPath = APIUtil.getAPIDocPath(api.getId());
        if (isSetDocLevelPermissions) {
            // Retain the docs
            for (Documentation doc : docs) {
                if ((APIConstants.DOC_API_BASED_VISIBILITY).equalsIgnoreCase(doc.getVisibility().name())) {
                    String documentationPath = APIUtil.getAPIDocPath(api.getId()) + doc.getName();
                    APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles,
                            documentationPath, registry);
                    if (Documentation.DocumentSourceType.INLINE.equals(doc.getSourceType())
                            || Documentation.DocumentSourceType.MARKDOWN.equals(doc.getSourceType())) {
                        String contentPath = APIUtil.getAPIDocContentPath(api.getId(), doc.getName());
                        APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                                visibleRoles, contentPath, registry);
                    } else if (Documentation.DocumentSourceType.FILE.equals(doc.getSourceType())
                            && doc.getFilePath() != null) {
                        String filePath = APIUtil.getDocumentationFilePath(api.getId(), doc.getFilePath()
                                .split("files" + RegistryConstants.PATH_SEPARATOR)[1]);
                        APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                                visibleRoles, filePath, registry);
                    }
                }
            }
        } else {
            APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles,
                    docRootPath, registry);
        }
    } else {
        //In order to support content search feature - we need to update resource permissions of document resources
        //if their visibility is set to API level.
if (docs != null) {
            for (Documentation doc : docs) {
                if ((APIConstants.DOC_API_BASED_VISIBILITY).equalsIgnoreCase(doc.getVisibility().name())) {
                    String documentationPath = APIUtil.getAPIDocPath(api.getId()) + doc.getName();
                    APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                            visibleRoles, documentationPath, registry);
                }
            }
        }
    }
    //update doc visibility
    if (docs != null) {
        Iterator it = docs.iterator();
        while (it.hasNext()) {
            Object docsObject = it.next();
            Documentation doc = (Documentation) docsObject;
            updateDocVisibility(api,doc);
        }
    }
}

// Verifies that every key manager referenced by the API is registered for the tenant; throws
// KEY_MANAGER_NOT_REGISTERED listing all missing ones.
private void validateKeyManagers(API api) throws APIManagementException {
    List<KeyManagerConfigurationDTO> keyManagerConfigurationsByTenant =
            apiMgtDAO.getKeyManagerConfigurationsByOrganization(tenantDomain);
    List<String> configuredMissingKeyManagers = new ArrayList<>();
    for (String keyManager : api.getKeyManagers()) {
        // The "all key managers" wildcard never needs individual validation.
        if (!APIConstants.KeyManager.API_LEVEL_ALL_KEY_MANAGERS.equals(keyManager)) {
            KeyManagerConfigurationDTO selectedKeyManager = null;
            for (KeyManagerConfigurationDTO keyManagerConfigurationDTO : keyManagerConfigurationsByTenant) {
                if (keyManager.equals(keyManagerConfigurationDTO.getName())) {
                    selectedKeyManager = keyManagerConfigurationDTO;
                    break;
                }
            }
            if (selectedKeyManager == null) {
                configuredMissingKeyManagers.add(keyManager);
            }
        }
    }
    if (!configuredMissingKeyManagers.isEmpty()) {
        throw new APIManagementException(
                "Key Manager(s) Not found :" + String.join(" , ", configuredMissingKeyManagers),
                ExceptionCodes.KEY_MANAGER_NOT_REGISTERED);
    }
}

/**
 * Update API metadata and resources.
 *
 * @param api      API to update
 * @param tenantId Tenant Id
 * @param username Username of the user who is updating
 * @throws APIManagementException If fails to update API.
 */
private void updateAPI(API api, int tenantId, String username) throws APIManagementException {
    apiMgtDAO.updateAPI(api, username);
    if (log.isDebugEnabled()) {
        log.debug("Successfully updated the API: " + api.getId() + " metadata in the database");
    }
    updateAPIResources(api, tenantId);
}

/**
 * Update resources of the API including local scopes and resource to scope attachments.
 *
 * @param api      API
 * @param tenantId Tenant Id
 * @throws APIManagementException If fails to update local scopes of the API.
 */
private void updateAPIResources(API api, int tenantId) throws APIManagementException {
    String tenantDomain = APIUtil.getTenantDomainFromTenantId(tenantId);
    APIIdentifier apiIdentifier = api.getId();
    // Get the new URI templates for the API
    Set<URITemplate> uriTemplates = api.getUriTemplates();
    // Get the existing local scope keys attached for the API
    Set<String> oldLocalScopeKeys = apiMgtDAO.getAllLocalScopeKeysForAPI(api.getUuid(), tenantId);
    // Get the existing URI templates for the API
    Set<URITemplate> oldURITemplates = apiMgtDAO.getURITemplatesOfAPI(api.getUuid());
    // Get the new local scope keys from URI templates
    Set<Scope> newLocalScopes = getScopesToRegisterFromURITemplates(api.getId().getApiName(),
            api.getOrganization(), uriTemplates);
    Set<String> newLocalScopeKeys = newLocalScopes.stream().map(Scope::getKey).collect(Collectors.toSet());
    // Get the existing versioned local scope keys attached for the API
    Set<String> oldVersionedLocalScopeKeys = apiMgtDAO.getVersionedLocalScopeKeysForAPI(api.getUuid(), tenantId);
    // Get the existing versioned local scope keys which needs to be removed (not updated) from the current updating
    // API and remove them from the oldLocalScopeKeys set before sending to KM, so that they will not be removed
    // from KM and can be still used by other versioned APIs.
Iterator oldLocalScopesItr = oldLocalScopeKeys.iterator();
    while (oldLocalScopesItr.hasNext()) {
        String oldLocalScopeKey = (String) oldLocalScopesItr.next();
        // if the scope is used in versioned APIs and it is not in new local scope key set
        if (oldVersionedLocalScopeKeys.contains(oldLocalScopeKey)
                && !newLocalScopeKeys.contains(oldLocalScopeKey)) {
            //remove from old local scope key set which will be send to KM
            oldLocalScopesItr.remove();
        }
    }
    apiMgtDAO.updateURITemplates(api, tenantId);
    if (log.isDebugEnabled()) {
        log.debug("Successfully updated the URI templates of API: " + apiIdentifier + " in the database");
    }
    // Update the resource scopes of the API in KM.
    // Need to remove the old local scopes and register new local scopes and, update the resource scope mappings
    // using the updated URI templates of the API.
    deleteScopes(oldLocalScopeKeys, tenantId);
    addScopes(newLocalScopes, tenantId);
    Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain);
    for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) {
        KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager();
        if (keyManager != null) {
            try {
                keyManager.updateResourceScopes(api, oldLocalScopeKeys, newLocalScopes, oldURITemplates,
                        uriTemplates);
                if (log.isDebugEnabled()) {
                    log.debug("Successfully updated the resource scopes of API: " + apiIdentifier +
                            " in Key Manager "+ keyManagerDtoEntry.getKey()+" .");
                }
            } catch (APIManagementException e) {
                // A single failing key manager must not abort the update of the others — log and continue.
                log.error("Error while updating resource to scope attachment in Key Manager " +
                        keyManagerDtoEntry.getKey(), e);
            }
        }
    }
}

// Carries forward endpoint security credentials from oldApi when the incoming update left the
// password blank (both the legacy UT fields and the endpoint-config JSON variants).
private void updateEndpointSecurity(API oldApi, API api) throws APIManagementException {
    try {
        if (api.isEndpointSecured() && StringUtils.isBlank(api.getEndpointUTPassword())
                && !StringUtils.isBlank(oldApi.getEndpointUTPassword())) {
            if (log.isDebugEnabled()) {
                log.debug("Given endpoint security password is empty");
            }
api.setEndpointUTUsername(oldApi.getEndpointUTUsername());
            api.setEndpointUTPassword(oldApi.getEndpointUTPassword());
            if (log.isDebugEnabled()) {
                log.debug("Using the previous username and password for endpoint security");
            }
        } else {
            String endpointConfig = api.getEndpointConfig();
            String oldEndpointConfig = oldApi.getEndpointConfig();
            if (StringUtils.isNotEmpty(endpointConfig) && StringUtils.isNotEmpty(oldEndpointConfig)) {
                JSONObject endpointConfigJson = (JSONObject) new JSONParser().parse(endpointConfig);
                JSONObject oldEndpointConfigJson = (JSONObject) new JSONParser().parse(oldEndpointConfig);
                if ((endpointConfigJson.get(APIConstants.ENDPOINT_SECURITY) != null)
                        && (oldEndpointConfigJson.get(APIConstants.ENDPOINT_SECURITY) != null)) {
                    JSONObject endpointSecurityJson =
                            (JSONObject) endpointConfigJson.get(APIConstants.ENDPOINT_SECURITY);
                    JSONObject oldEndpointSecurityJson =
                            (JSONObject) oldEndpointConfigJson.get(APIConstants.ENDPOINT_SECURITY);
                    // Production endpoint security: reuse the old credentials when the new config has
                    // security enabled but supplies no password.
                    if (endpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_PRODUCTION) != null) {
                        if (oldEndpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_PRODUCTION) != null) {
                            EndpointSecurity endpointSecurity = new ObjectMapper().convertValue(
                                    endpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_PRODUCTION),
                                    EndpointSecurity.class);
                            EndpointSecurity oldEndpointSecurity = new ObjectMapper().convertValue(
                                    oldEndpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_PRODUCTION),
                                    EndpointSecurity.class);
                            if (endpointSecurity.isEnabled() && oldEndpointSecurity.isEnabled() &&
                                    StringUtils.isBlank(endpointSecurity.getPassword())) {
                                endpointSecurity.setUsername(oldEndpointSecurity.getUsername());
                                endpointSecurity.setPassword(oldEndpointSecurity.getPassword());
                                // OAuth-secured endpoints also carry forward the token-endpoint settings.
                                if (endpointSecurity.getType().equals(APIConstants.ENDPOINT_SECURITY_TYPE_OAUTH)) {
                                    endpointSecurity.setUniqueIdentifier(oldEndpointSecurity.getUniqueIdentifier());
                                    endpointSecurity.setGrantType(oldEndpointSecurity.getGrantType());
endpointSecurity.setTokenUrl(oldEndpointSecurity.getTokenUrl());
                                    endpointSecurity.setClientId(oldEndpointSecurity.getClientId());
                                    endpointSecurity.setClientSecret(oldEndpointSecurity.getClientSecret());
                                    endpointSecurity.setCustomParameters(oldEndpointSecurity.getCustomParameters());
                                }
                            }
                            endpointSecurityJson.replace(APIConstants.ENDPOINT_SECURITY_PRODUCTION,
                                    new JSONParser()
                                            .parse(new ObjectMapper().writeValueAsString(endpointSecurity)));
                        }
                    }
                    // Sandbox endpoint security: same carry-forward logic as production above.
                    if (endpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_SANDBOX) != null) {
                        if (oldEndpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_SANDBOX) != null) {
                            EndpointSecurity endpointSecurity = new ObjectMapper()
                                    .convertValue(endpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_SANDBOX),
                                            EndpointSecurity.class);
                            EndpointSecurity oldEndpointSecurity = new ObjectMapper()
                                    .convertValue(oldEndpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_SANDBOX),
                                            EndpointSecurity.class);
                            if (endpointSecurity.isEnabled() && oldEndpointSecurity.isEnabled() &&
                                    StringUtils.isBlank(endpointSecurity.getPassword())) {
                                endpointSecurity.setUsername(oldEndpointSecurity.getUsername());
                                endpointSecurity.setPassword(oldEndpointSecurity.getPassword());
                                if (endpointSecurity.getType().equals(APIConstants.ENDPOINT_SECURITY_TYPE_OAUTH)) {
                                    endpointSecurity.setUniqueIdentifier(oldEndpointSecurity.getUniqueIdentifier());
                                    endpointSecurity.setGrantType(oldEndpointSecurity.getGrantType());
                                    endpointSecurity.setTokenUrl(oldEndpointSecurity.getTokenUrl());
                                    endpointSecurity.setClientId(oldEndpointSecurity.getClientId());
                                    endpointSecurity.setClientSecret(oldEndpointSecurity.getClientSecret());
                                    endpointSecurity.setCustomParameters(oldEndpointSecurity.getCustomParameters());
                                }
                            }
                            endpointSecurityJson.replace(APIConstants.ENDPOINT_SECURITY_SANDBOX,
                                    new JSONParser()
                                            .parse(new ObjectMapper().writeValueAsString(endpointSecurity)));
                        }
                        // NOTE(review): the security section is written back only inside the sandbox branch,
                        // unlike the production branch — confirm this asymmetry is intentional.
                        endpointConfigJson.replace(APIConstants.ENDPOINT_SECURITY,endpointSecurityJson);
                    }
                }
api.setEndpointConfig(endpointConfigJson.toJSONString());
            }
        }
    } catch (ParseException | JsonProcessingException e) {
        throw new APIManagementException(
                "Error while processing endpoint security for API " + api.getId().toString(), e);
    }
}

// Persists the API's governance artifact in the registry (attributes, tags, WSDL, status property,
// permissions, categories) inside a transaction and returns the artifact UUID.
private String updateApiArtifact(API api, boolean updateMetadata, boolean updatePermissions)
        throws APIManagementException {
    //Validate Transports
    validateAndSetTransports(api);
    validateAndSetAPISecurity(api);
    boolean transactionCommitted = false;
    String apiUUID = null;
    try {
        registry.beginTransaction();
        String apiArtifactId = registry.get(APIUtil.getAPIPath(api.getId())).getUUID();
        GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
        // NOTE(review): artifactManager is dereferenced on the next line BEFORE the null check below,
        // so a null manager throws NPE and the check never fires — the check should precede this call.
        GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
        if (artifactManager == null) {
            String errorMessage = "Artifact manager is null when updating API artifact ID " + api.getId();
            log.error(errorMessage);
            throw new APIManagementException(errorMessage);
        }
        String oldStatus = artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS);
        Resource apiResource = registry.get(artifact.getPath());
        String oldAccessControlRoles = api.getAccessControlRoles();
        if (apiResource != null) {
            oldAccessControlRoles = registry.get(artifact.getPath()).getProperty(APIConstants.PUBLISHER_ROLES);
        }
        GenericArtifact updateApiArtifact = APIUtil.createAPIArtifactContent(artifact, api);
        String artifactPath = GovernanceUtils.getArtifactPath(registry, updateApiArtifact.getId());
        // Replace the old tag set with the API's current tags.
        org.wso2.carbon.registry.core.Tag[] oldTags = registry.getTags(artifactPath);
        if (oldTags != null) {
            for (org.wso2.carbon.registry.core.Tag tag : oldTags) {
                registry.removeTag(artifactPath, tag.getTagName());
            }
        }
        Set<String> tagSet = api.getTags();
        if (tagSet != null) {
            for (String tag : tagSet) {
                registry.applyTag(artifactPath, tag);
            }
        }
        if (updateMetadata && api.getEndpointConfig() != null && !api.getEndpointConfig().isEmpty()) {
            // If WSDL URL get change only we update registry WSDL resource. If its registry resource patch we
            // will skip registry update. Only if this API created with WSDL end point type we need to update
            // wsdls for each update.
            //check for wsdl endpoint
            org.json.JSONObject response1 = new org.json.JSONObject(api.getEndpointConfig());
            // NOTE(review): isWSAPI is computed but not used in this method — TODO confirm it can be removed.
            boolean isWSAPI = APIConstants.APITransportType.WS.toString().equals(api.getType());
            String wsdlURL;
            if (!APIUtil.isStreamingApi(api) && "wsdl".equalsIgnoreCase(response1.get("endpoint_type").toString())
                    && response1.has("production_endpoints")) {
                wsdlURL = response1.getJSONObject("production_endpoints").get("url").toString();
                if (APIUtil.isValidWSDLURL(wsdlURL, true)) {
                    String path = APIUtil.createWSDL(registry, api);
                    if (path != null) {
                        // reset the wsdl path to permlink
                        updateApiArtifact.setAttribute(APIConstants.API_OVERVIEW_WSDL, api.getWsdlUrl());
                    }
                }
            }
        }
        artifactManager.updateGenericArtifact(updateApiArtifact);
        //write API Status to a separate property. This is done to support querying APIs using custom query (SQL)
        //to gain performance
        String apiStatus = api.getStatus().toUpperCase();
        saveAPIStatus(artifactPath, apiStatus);
        String[] visibleRoles = new String[0];
        String publisherAccessControlRoles = api.getAccessControlRoles();
        updateRegistryResources(artifactPath, publisherAccessControlRoles, api.getAccessControl(),
                api.getAdditionalProperties());
        //propagate api status change and access control roles change to document artifact
        String newStatus = updateApiArtifact.getAttribute(APIConstants.API_OVERVIEW_STATUS);
        if (!StringUtils.equals(oldStatus, newStatus) ||
                !StringUtils.equals(oldAccessControlRoles, publisherAccessControlRoles)) {
            APIUtil.notifyAPIStateChangeToAssociatedDocuments(artifact, registry);
        }
        if (updatePermissions) {
            APIUtil.clearResourcePermissions(artifactPath, api.getId(), ((UserRegistry) registry).getTenantId());
            String visibleRolesList = api.getVisibleRoles();
            if (visibleRolesList != null) {
                visibleRoles = visibleRolesList.split(",");
            }
APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles,
                    artifactPath, registry);
        }
        //attaching api categories to the API
        List<APICategory> attachedApiCategories = api.getApiCategories();
        artifact.removeAttribute(APIConstants.API_CATEGORIES_CATEGORY_NAME);
        if (attachedApiCategories != null) {
            for (APICategory category : attachedApiCategories) {
                artifact.addAttribute(APIConstants.API_CATEGORIES_CATEGORY_NAME, category.getName());
            }
        }
        registry.commitTransaction();
        transactionCommitted = true;
        apiUUID = updateApiArtifact.getId();
        // Permission propagation to documentation resources happens after commit.
        if (updatePermissions) {
            APIManagerConfiguration config = getAPIManagerConfiguration();
            boolean isSetDocLevelPermissions = Boolean.parseBoolean(
                    config.getFirstProperty(APIConstants.API_PUBLISHER_ENABLE_API_DOC_VISIBILITY_LEVELS));
            String docRootPath = APIUtil.getAPIDocPath(api.getId());
            if (isSetDocLevelPermissions) {
                // Retain the docs
                List<Documentation> docs = getAllDocumentation(api.getId());
                for (Documentation doc : docs) {
                    if ((APIConstants.DOC_API_BASED_VISIBILITY).equalsIgnoreCase(doc.getVisibility().name())) {
                        String documentationPath = APIUtil.getAPIDocPath(api.getId()) + doc.getName();
                        APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                                visibleRoles, documentationPath, registry);
                        if (Documentation.DocumentSourceType.INLINE.equals(doc.getSourceType())
                                || Documentation.DocumentSourceType.MARKDOWN.equals(doc.getSourceType())) {
                            String contentPath = APIUtil.getAPIDocContentPath(api.getId(), doc.getName());
                            APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                                    visibleRoles, contentPath, registry);
                        } else if (Documentation.DocumentSourceType.FILE.equals(doc.getSourceType())
                                && doc.getFilePath() != null) {
                            String filePath = APIUtil.getDocumentationFilePath(api.getId(), doc.getFilePath()
                                    .split("files" + RegistryConstants.PATH_SEPARATOR)[1]);
                            APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                                    visibleRoles,
filePath, registry); } } } } else { APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles, docRootPath, registry); } } else { //In order to support content search feature - we need to update resource permissions of document resources //if their visibility is set to API level. List<Documentation> docs = getAllDocumentation(api.getId()); if (docs != null) { for (Documentation doc : docs) { if ((APIConstants.DOC_API_BASED_VISIBILITY).equalsIgnoreCase(doc.getVisibility().name())) { String documentationPath = APIUtil.getAPIDocPath(api.getId()) + doc.getName(); APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles, documentationPath, registry); } } } } } catch (Exception e) { try { registry.rollbackTransaction(); } catch (RegistryException re) { // Throwing an error from this level will mask the original exception log.error("Error while rolling back the transaction for API: " + api.getId().getApiName(), re); } handleException("Error while performing registry transaction operation", e); } finally { try { if (!transactionCommitted) { registry.rollbackTransaction(); } } catch (RegistryException ex) { handleException("Error occurred while rolling back the transaction.", ex); } } return apiUUID; } public Map<String, String> propergateAPIStatusChangeToGateways(String newStatus, API api) throws APIManagementException { Map<String, String> failedGateways = new HashMap<String, String>(); APIIdentifier identifier = api.getId(); String providerTenantMode = identifier.getProviderName(); String name = identifier.getApiName(); String version = identifier.getVersion(); boolean isTenantFlowStarted = false; try { String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerTenantMode)); if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { isTenantFlowStarted = true; PrivilegedCarbonContext.startTenantFlow(); 
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } if (api != null) { String currentStatus = api.getStatus(); if (APIConstants.PUBLISHED.equals(newStatus) || !currentStatus.equals(newStatus)) { api.setStatus(newStatus); APIManagerConfiguration config = getAPIManagerConfiguration(); String gatewayType = config.getFirstProperty(APIConstants.API_GATEWAY_TYPE); api.setAsPublishedDefaultVersion(api.getId().getVersion() .equals(apiMgtDAO.getPublishedDefaultVersion(api.getId()))); loadMediationPoliciesToAPI(api, tenantDomain); } } else { handleException("Couldn't find an API with the name-" + name + "version-" + version); } } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } return failedGateways; } private void loadMediationPoliciesToAPI(API api, String organization) throws APIManagementException { if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence()) || APIUtil.isSequenceDefined(api.getFaultSequence())) { Organization org = new Organization(organization); String apiUUID = api.getUuid(); // get all policies try { List<MediationInfo> localPolicies = apiPersistenceInstance.getAllMediationPolicies(org, apiUUID); List<Mediation> globalPolicies = null; if (APIUtil.isSequenceDefined(api.getInSequence())) { boolean found = false; for (MediationInfo mediationInfo : localPolicies) { if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN.equals(mediationInfo.getType()) && api.getInSequence().equals(mediationInfo.getName())) { org.wso2.carbon.apimgt.persistence.dto.Mediation mediationPolicy = apiPersistenceInstance .getMediationPolicy(org, apiUUID, mediationInfo.getId()); Mediation mediation = new Mediation(); mediation.setConfig(mediationPolicy.getConfig()); mediation.setName(mediationPolicy.getName()); mediation.setUuid(mediationPolicy.getId()); mediation.setType(APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN); mediation.setGlobal(false); 
api.setInSequenceMediation(mediation); found = true; break; } } if (!found) { // global policy if (globalPolicies == null) { globalPolicies = getAllGlobalMediationPolicies(); } for (Mediation m : globalPolicies) { if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN.equals(m.getType()) && api.getInSequence().equals(m.getName())) { Mediation mediation = getGlobalMediationPolicy(m.getUuid()); mediation.setGlobal(true); api.setInSequenceMediation(mediation); found = true; break; } } } } if (APIUtil.isSequenceDefined(api.getOutSequence())) { boolean found = false; for (MediationInfo mediationInfo : localPolicies) { if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT.equals(mediationInfo.getType()) && api.getOutSequence().equals(mediationInfo.getName())) { org.wso2.carbon.apimgt.persistence.dto.Mediation mediationPolicy = apiPersistenceInstance .getMediationPolicy(org, apiUUID, mediationInfo.getId()); Mediation mediation = new Mediation(); mediation.setConfig(mediationPolicy.getConfig()); mediation.setName(mediationPolicy.getName()); mediation.setUuid(mediationPolicy.getId()); mediation.setType(APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT); mediation.setGlobal(false); api.setOutSequenceMediation(mediation); found = true; break; } } if (!found) { // global policy if (globalPolicies == null) { globalPolicies = getAllGlobalMediationPolicies(); } for (Mediation m : globalPolicies) { if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT.equals(m.getType()) && api.getOutSequence().equals(m.getName())) { Mediation mediation = getGlobalMediationPolicy(m.getUuid()); mediation.setGlobal(true); api.setOutSequenceMediation(mediation); found = true; break; } } } } if (APIUtil.isSequenceDefined(api.getFaultSequence())) { boolean found = false; for (MediationInfo mediationInfo : localPolicies) { if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT.equals(mediationInfo.getType()) && api.getFaultSequence().equals(mediationInfo.getName())) { org.wso2.carbon.apimgt.persistence.dto.Mediation mediationPolicy = 
apiPersistenceInstance .getMediationPolicy(org, apiUUID, mediationInfo.getId()); Mediation mediation = new Mediation(); mediation.setConfig(mediationPolicy.getConfig()); mediation.setName(mediationPolicy.getName()); mediation.setUuid(mediationPolicy.getId()); mediation.setType(APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT); mediation.setGlobal(false); api.setFaultSequenceMediation(mediation); found = true; break; } } if (!found) { // global policy if (globalPolicies == null) { globalPolicies = getAllGlobalMediationPolicies(); } for (Mediation m : globalPolicies) { if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT.equals(m.getType()) && api.getFaultSequence().equals(m.getName())) { Mediation mediation = getGlobalMediationPolicy(m.getUuid()); mediation.setGlobal(true); api.setFaultSequenceMediation(mediation); found = true; break; } } } } } catch (MediationPolicyPersistenceException e) { throw new APIManagementException("Error while loading medation policies", e); } } } public boolean updateAPIforStateChange(API api, String currentStatus, String newStatus, Map<String, String> failedGatewaysMap) throws APIManagementException, FaultGatewaysException { boolean isSuccess = false; Map<String, Map<String, String>> failedGateways = new ConcurrentHashMap<String, Map<String, String>>(); String provider = api.getId().getProviderName(); String providerTenantMode = api.getId().getProviderName(); provider = APIUtil.replaceEmailDomain(provider); String name = api.getId().getApiName(); String version = api.getId().getVersion(); boolean isTenantFlowStarted = false; try { String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerTenantMode)); if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { isTenantFlowStarted = true; PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } //APIIdentifier apiId = new APIIdentifier(provider, 
name, version); //API api = getAPI(apiId); if (api != null) { //String currentStatus = api.getStatus(); if (!currentStatus.equals(newStatus)) { api.setStatus(newStatus); // If API status changed to publish we should add it to recently added APIs list // this should happen in store-publisher cluster domain if deployment is distributed // IF new API published we will add it to recently added APIs Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER) .getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME).removeAll(); api.setAsPublishedDefaultVersion(api.getId().getVersion() .equals(apiMgtDAO.getPublishedDefaultVersion(api.getId()))); if (failedGatewaysMap != null) { if (APIConstants.PUBLISHED.equals(newStatus) || APIConstants.DEPRECATED.equals(newStatus) || APIConstants.BLOCKED.equals(newStatus) || APIConstants.PROTOTYPED.equals(newStatus)) { Map<String, String> failedToPublishEnvironments = failedGatewaysMap; if (!failedToPublishEnvironments.isEmpty()) { Set<String> publishedEnvironments = new HashSet<String>(api.getEnvironments()); publishedEnvironments.removeAll(new ArrayList<String>(failedToPublishEnvironments .keySet())); api.setEnvironments(publishedEnvironments); //updateApiArtifactNew(api, true, false); failedGateways.clear(); failedGateways.put("UNPUBLISHED", Collections.<String, String>emptyMap()); failedGateways.put("PUBLISHED", failedToPublishEnvironments); } } else { // API Status : RETIRED or CREATED Map<String, String> failedToRemoveEnvironments = failedGatewaysMap; if(!APIConstants.CREATED.equals(newStatus)) { cleanUpPendingSubscriptionCreationProcessesByAPI(api.getUuid()); apiMgtDAO.removeAllSubscriptions(api.getUuid()); } if (!failedToRemoveEnvironments.isEmpty()) { Set<String> publishedEnvironments = new HashSet<String>(api.getEnvironments()); publishedEnvironments.addAll(failedToRemoveEnvironments.keySet()); api.setEnvironments(publishedEnvironments); //updateApiArtifactNew(api, true, false); failedGateways.clear(); 
failedGateways.put("UNPUBLISHED", failedToRemoveEnvironments); failedGateways.put("PUBLISHED", Collections.<String, String>emptyMap()); } } } //updateApiArtifactNew(api, false, false); PublisherAPI publisherAPI = APIMapper.INSTANCE.toPublisherApi(api); try { apiPersistenceInstance.updateAPI(new Organization(api.getOrganization()), publisherAPI); } catch (APIPersistenceException e) { handleException("Error while persisting the updated API ", e); } } isSuccess = true; } else { handleException("Couldn't find an API with the name-" + name + "version-" + version); } } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } if (!failedGateways.isEmpty() && (!failedGateways.get("UNPUBLISHED").isEmpty() || !failedGateways.get("PUBLISHED").isEmpty())) { throw new FaultGatewaysException(failedGateways); } return isSuccess; } public boolean updateAPIforStateChange(APIIdentifier identifier, String newStatus, Map<String, String> failedGatewaysMap, API api) throws APIManagementException, FaultGatewaysException { boolean isSuccess = false; Map<String, Map<String, String>> failedGateways = new ConcurrentHashMap<String, Map<String, String>>(); String provider = identifier.getProviderName(); String providerTenantMode = identifier.getProviderName(); provider = APIUtil.replaceEmailDomain(provider); String name = identifier.getApiName(); String version = identifier.getVersion(); boolean isTenantFlowStarted = false; try { String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerTenantMode)); if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { isTenantFlowStarted = true; PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } APIIdentifier apiId = new APIIdentifier(provider, name, version); if (api != null) { String currentStatus = api.getStatus(); if (!currentStatus.equals(newStatus)) { 
api.setStatus(newStatus); // If API status changed to publish we should add it to recently added APIs list // this should happen in store-publisher cluster domain if deployment is distributed // IF new API published we will add it to recently added APIs Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER) .getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME).removeAll(); api.setAsPublishedDefaultVersion(api.getId().getVersion() .equals(apiMgtDAO.getPublishedDefaultVersion(api.getId()))); if (failedGatewaysMap != null) { if (APIConstants.PUBLISHED.equals(newStatus) || APIConstants.DEPRECATED.equals(newStatus) || APIConstants.BLOCKED.equals(newStatus) || APIConstants.PROTOTYPED.equals(newStatus)) { Map<String, String> failedToPublishEnvironments = failedGatewaysMap; if (!failedToPublishEnvironments.isEmpty()) { Set<String> publishedEnvironments = new HashSet<String>(api.getEnvironments()); publishedEnvironments.removeAll(new ArrayList<String>(failedToPublishEnvironments .keySet())); api.setEnvironments(publishedEnvironments); updateApiArtifact(api, true, false); failedGateways.clear(); failedGateways.put("UNPUBLISHED", Collections.<String, String>emptyMap()); failedGateways.put("PUBLISHED", failedToPublishEnvironments); } } else { // API Status : RETIRED or CREATED Map<String, String> failedToRemoveEnvironments = failedGatewaysMap; if(!APIConstants.CREATED.equals(newStatus)) { cleanUpPendingSubscriptionCreationProcessesByAPI(api.getUuid()); apiMgtDAO.removeAllSubscriptions(api.getUuid()); } if (!failedToRemoveEnvironments.isEmpty()) { Set<String> publishedEnvironments = new HashSet<String>(api.getEnvironments()); publishedEnvironments.addAll(failedToRemoveEnvironments.keySet()); api.setEnvironments(publishedEnvironments); updateApiArtifact(api, true, false); failedGateways.clear(); failedGateways.put("UNPUBLISHED", failedToRemoveEnvironments); failedGateways.put("PUBLISHED", Collections.<String, String>emptyMap()); } } } updateApiArtifact(api, false, 
false); } isSuccess = true; } else { handleException("Couldn't find an API with the name-" + name + "version-" + version); } } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } if (!failedGateways.isEmpty() && (!failedGateways.get("UNPUBLISHED").isEmpty() || !failedGateways.get("PUBLISHED").isEmpty())) { throw new FaultGatewaysException(failedGateways); } return isSuccess; } /** * Function returns true if the specified API already exists in the registry * * @param identifier * @return * @throws APIManagementException */ public boolean checkIfAPIExists(APIIdentifier identifier) throws APIManagementException { String apiPath = APIUtil.getAPIPath(identifier); try { String tenantDomain = MultitenantUtils .getTenantDomain(APIUtil.replaceEmailDomainBack(identifier.getProviderName())); Registry registry; if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { int id = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomain); registry = ServiceReferenceHolder.getInstance().getRegistryService().getGovernanceSystemRegistry(id); } else { if (this.tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(this.tenantDomain)) { registry = ServiceReferenceHolder.getInstance().getRegistryService().getGovernanceUserRegistry( identifier.getProviderName(), MultitenantConstants.SUPER_TENANT_ID); } else { if (this.tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(this.tenantDomain)) { registry = ServiceReferenceHolder.getInstance().getRegistryService().getGovernanceUserRegistry( identifier.getProviderName(), MultitenantConstants.SUPER_TENANT_ID); } else { registry = this.registry; } } } return registry.resourceExists(apiPath); } catch (RegistryException e) { handleException("Failed to get API from : " + apiPath, e); return false; } catch (UserStoreException e) { handleException("Failed to get API from : " + apiPath, e); return false; } } public 
void makeAPIKeysForwardCompatible(API api) throws APIManagementException {
        // Makes subscription keys of older (non-retired) versions of the same API usable with the
        // given API version, by delegating to the DAO with the older versions sorted newest-first.
        String provider = api.getId().getProviderName();
        String apiName = api.getId().getApiName();
        Set<String> versions = getAPIVersions(provider, apiName, api.getOrganization());
        APIVersionComparator comparator = new APIVersionComparator();
        List<API> sortedAPIs = new ArrayList<API>();
        for (String version : versions) {
            if (version.equals(api.getId().getVersion())) {
                continue; // skip the API itself; only earlier versions are considered
            }
            // NOTE(review): a stub API object is built instead of loading the full artifact (the
            // getAPI(...) call was commented out). Its status is presumably unset on the stub, so the
            // RETIRED check below may never exclude anything — confirm against APIVersionComparator
            // and the API constructor before relying on the RETIRED filter.
            API otherApi = new API(new APIIdentifier(provider, apiName, version));//getAPI(new APIIdentifier(provider, apiName, version));
            if (comparator.compare(otherApi, api) < 0 && !APIConstants.RETIRED.equals(otherApi.getStatus())) {
                sortedAPIs.add(otherApi);
            }
        }
        // Get the subscriptions from the latest api version first
        Collections.sort(sortedAPIs, comparator);
        apiMgtDAO.makeKeysForwardCompatible(new ApiTypeWrapper(api), sortedAPIs);
    }

    /**
     * Returns the subscriber name for the given subscription id.
     *
     * @param subscriptionId The subscription id of the subscriber to be returned
     * @return The subscriber or null if the requested subscriber does not exist
     * @throws APIManagementException if failed to get Subscriber
     */
    @Override
    public String getSubscriber(String subscriptionId) throws APIManagementException {
        // Simple pass-through to the persistence layer.
        return apiMgtDAO.getSubscriberName(subscriptionId);
    }

    /**
     * Returns the claims of subscriber for the given subscriber.
* * @param subscriber The name of the subscriber to be returned * @return The looked up claims of the subscriber or null if the requested subscriber does not exist * @throws APIManagementException if failed to get Subscriber */ @Override public Map<String, String> getSubscriberClaims(String subscriber) throws APIManagementException { String tenantDomain = MultitenantUtils.getTenantDomain(subscriber); int tenantId = 0; Map<String, String> claimMap = new HashMap<>(); Map<String, String> subscriberClaims = null; String configuredClaims = ""; try { tenantId = getTenantId(tenantDomain); UserStoreManager userStoreManager = ServiceReferenceHolder.getInstance().getRealmService(). getTenantUserRealm(tenantId).getUserStoreManager(); if (userStoreManager.isExistingUser(subscriber)) { subscriberClaims = APIUtil.getClaims(subscriber, tenantId, ClaimsRetriever.DEFAULT_DIALECT_URI); APIManagerConfiguration configuration = getAPIManagerConfiguration(); configuredClaims = configuration.getFirstProperty(APIConstants.API_PUBLISHER_SUBSCRIBER_CLAIMS); } for (String claimURI : configuredClaims.split(",")) { if (subscriberClaims != null) { claimMap.put(claimURI, subscriberClaims.get(claimURI)); } } } catch (UserStoreException e) { throw new APIManagementException("Error while retrieving tenant id for tenant domain " + tenantDomain, e); } return claimMap; } private Set<API> getAssociatedAPIs(APIProduct apiProduct) throws APIManagementException { List<APIProductResource> productResources = apiProduct.getProductResources(); Set<API> apis = new HashSet<>(); for (APIProductResource productResource : productResources) { API api = getAPIbyUUID(productResource.getApiId(), CarbonContext.getThreadLocalCarbonContext().getTenantDomain()); apis.add(api); } return apis; } /** * This method returns a list of previous versions of a given API * * @param api * @return oldPublishedAPIList * @throws APIManagementException */ private List<APIIdentifier> getOldPublishedAPIList(API api) throws 
APIManagementException { List<APIIdentifier> oldPublishedAPIList = new ArrayList<APIIdentifier>(); List<API> apiList = getAPIVersionsByProviderAndName(api.getId().getProviderName(), api.getId().getName(), api.getOrganization()); APIVersionComparator versionComparator = new APIVersionComparator(); for (API oldAPI : apiList) { if (oldAPI.getId().getApiName().equals(api.getId().getApiName()) && versionComparator.compare(oldAPI, api) < 0 && (oldAPI.getStatus().equals(APIConstants.PUBLISHED))) { oldPublishedAPIList.add(oldAPI.getId()); } } return oldPublishedAPIList; } /** * This method used to send notifications to the previous subscribers of older versions of a given API * * @param api * @throws APIManagementException */ private void sendEmailNotification(API api) throws APIManagementException { try { JSONObject tenantConfig = APIUtil.getTenantConfig(tenantDomain); String isNotificationEnabled = "false"; if (tenantConfig.containsKey(NotifierConstants.NOTIFICATIONS_ENABLED)) { isNotificationEnabled = (String) tenantConfig.get(NotifierConstants.NOTIFICATIONS_ENABLED); } if (JavaUtils.isTrueExplicitly(isNotificationEnabled)) { List<APIIdentifier> apiIdentifiers = getOldPublishedAPIList(api); for (APIIdentifier oldAPI : apiIdentifiers) { Properties prop = new Properties(); prop.put(NotifierConstants.API_KEY, oldAPI); prop.put(NotifierConstants.NEW_API_KEY, api.getId()); Set<Subscriber> subscribersOfAPI = apiMgtDAO.getSubscribersOfAPI(oldAPI); prop.put(NotifierConstants.SUBSCRIBERS_PER_API, subscribersOfAPI); NotificationDTO notificationDTO = new NotificationDTO(prop, NotifierConstants.NOTIFICATION_TYPE_NEW_VERSION); notificationDTO.setTenantID(tenantId); notificationDTO.setTenantDomain(tenantDomain); new NotificationExecutor().sendAsyncNotifications(notificationDTO); } } } catch (NotificationException e) { log.error(e.getMessage(), e); } } /** * This method used to validate and set transports in api * @param api * @throws APIManagementException */ private void 
validateAndSetTransports(API api) throws APIManagementException { String transports = api.getTransports(); if (!StringUtils.isEmpty(transports) && !("null".equalsIgnoreCase(transports))) { if (transports.contains(",")) { StringTokenizer st = new StringTokenizer(transports, ","); while (st.hasMoreTokens()) { checkIfValidTransport(st.nextToken()); } } else { checkIfValidTransport(transports); } } else { api.setTransports(Constants.TRANSPORT_HTTP + ',' + Constants.TRANSPORT_HTTPS); } } /** * This method used to validate and set transports in api product * @param apiProduct * @throws APIManagementException */ private void validateAndSetTransports(APIProduct apiProduct) throws APIManagementException { String transports = apiProduct.getTransports(); if (!StringUtils.isEmpty(transports) && !("null".equalsIgnoreCase(transports))) { if (transports.contains(",")) { StringTokenizer st = new StringTokenizer(transports, ","); while (st.hasMoreTokens()) { checkIfValidTransport(st.nextToken()); } } else { checkIfValidTransport(transports); } } else { apiProduct.setTransports(Constants.TRANSPORT_HTTP + ',' + Constants.TRANSPORT_HTTPS); } } /** * This method used to select security level according to given api Security * @param apiSecurity * @return */ private ArrayList<String> selectSecurityLevels(String apiSecurity) { ArrayList<String> securityLevels = new ArrayList<>(); String[] apiSecurityLevels = apiSecurity.split(","); boolean isOauth2 = false; boolean isMutualSSL = false; boolean isBasicAuth = false; boolean isApiKey = false; boolean isMutualSSLMandatory = false; boolean isOauthBasicAuthMandatory = false; boolean securitySchemeFound = false; for (String apiSecurityLevel : apiSecurityLevels) { if (apiSecurityLevel.trim().equalsIgnoreCase(APIConstants.DEFAULT_API_SECURITY_OAUTH2)) { isOauth2 = true; securityLevels.add(APIConstants.DEFAULT_API_SECURITY_OAUTH2); securitySchemeFound = true; } if (apiSecurityLevel.trim().equalsIgnoreCase(APIConstants.API_SECURITY_MUTUAL_SSL)) { 
isMutualSSL = true; securityLevels.add(APIConstants.API_SECURITY_MUTUAL_SSL); securitySchemeFound = true; } if (apiSecurityLevel.trim().equalsIgnoreCase(APIConstants.API_SECURITY_BASIC_AUTH)) { isBasicAuth = true; securityLevels.add(APIConstants.API_SECURITY_BASIC_AUTH); securitySchemeFound = true; } if (apiSecurityLevel.trim().equalsIgnoreCase(APIConstants.API_SECURITY_API_KEY)){ isApiKey = true; securityLevels.add(APIConstants.API_SECURITY_API_KEY); securitySchemeFound = true; } if (apiSecurityLevel.trim().equalsIgnoreCase(APIConstants.API_SECURITY_MUTUAL_SSL_MANDATORY)) { isMutualSSLMandatory = true; securityLevels.add(APIConstants.API_SECURITY_MUTUAL_SSL_MANDATORY); } if (apiSecurityLevel.trim().equalsIgnoreCase(APIConstants.API_SECURITY_OAUTH_BASIC_AUTH_API_KEY_MANDATORY)) { isOauthBasicAuthMandatory = true; securityLevels.add(APIConstants.API_SECURITY_OAUTH_BASIC_AUTH_API_KEY_MANDATORY); } } // If no security schema found, set OAuth2 as default if (!securitySchemeFound) { isOauth2 = true; securityLevels.add(APIConstants.DEFAULT_API_SECURITY_OAUTH2); } // If Only OAuth2/Basic-Auth specified, set it as mandatory if (!isMutualSSL && !isOauthBasicAuthMandatory) { securityLevels.add(APIConstants.API_SECURITY_OAUTH_BASIC_AUTH_API_KEY_MANDATORY); } // If Only Mutual SSL specified, set it as mandatory if (!isBasicAuth && !isOauth2 && !isApiKey && !isMutualSSLMandatory) { securityLevels.add(APIConstants.API_SECURITY_MUTUAL_SSL_MANDATORY); } // If OAuth2/Basic-Auth and Mutual SSL protected and not specified the mandatory scheme, // set OAuth2/Basic-Auth as mandatory if ((isOauth2 || isBasicAuth || isApiKey) && isMutualSSL && !isOauthBasicAuthMandatory && !isMutualSSLMandatory) { securityLevels.add(APIConstants.API_SECURITY_OAUTH_BASIC_AUTH_API_KEY_MANDATORY); } return securityLevels; } /** * To validate the API Security options and set it. * * @param api Relevant API that need to be validated. 
*/ private void validateAndSetAPISecurity(API api) { String apiSecurity = APIConstants.DEFAULT_API_SECURITY_OAUTH2; String security = api.getApiSecurity(); if (security!= null) { apiSecurity = security; ArrayList<String> securityLevels = selectSecurityLevels(apiSecurity); apiSecurity = String.join(",", securityLevels); } if (log.isDebugEnabled()) { log.debug("API " + api.getId() + " has following enabled protocols : " + apiSecurity); } api.setApiSecurity(apiSecurity); } /** * To validate the API Security options and set it. * * @param apiProduct Relevant APIProduct that need to be validated. */ private void validateAndSetAPISecurity(APIProduct apiProduct) { String apiSecurity = APIConstants.DEFAULT_API_SECURITY_OAUTH2; String security = apiProduct.getApiSecurity(); if (security!= null) { apiSecurity = security; ArrayList<String> securityLevels = selectSecurityLevels(apiSecurity); apiSecurity = String.join(",", securityLevels); } if (log.isDebugEnabled()) { log.debug("APIProduct " + apiProduct.getId() + " has following enabled protocols : " + apiSecurity); } apiProduct.setApiSecurity(apiSecurity); } private void checkIfValidTransport(String transport) throws APIManagementException { if (!Constants.TRANSPORT_HTTP.equalsIgnoreCase(transport) && !Constants.TRANSPORT_HTTPS.equalsIgnoreCase(transport) && !APIConstants.WS_PROTOCOL.equalsIgnoreCase(transport) && !APIConstants.WSS_PROTOCOL.equalsIgnoreCase(transport)) { handleException("Unsupported Transport [" + transport + ']'); } } private void removeFromGateway(API api, Set<APIRevisionDeployment> gatewaysToRemove, Set<String> environmentsToAdd) { Set<String> environmentsToAddSet = new HashSet<>(environmentsToAdd); Set<String> environmentsToRemove = new HashSet<>(); for (APIRevisionDeployment apiRevisionDeployment : gatewaysToRemove) { environmentsToRemove.add(apiRevisionDeployment.getDeployment()); } environmentsToRemove.removeAll(environmentsToAdd); APIGatewayManager gatewayManager = APIGatewayManager.getInstance(); 
gatewayManager.unDeployFromGateway(api, tenantDomain, environmentsToRemove); if (log.isDebugEnabled()) { String logMessage = "API Name: " + api.getId().getApiName() + ", API Version " + api.getId().getVersion() + " deleted from gateway"; log.debug(logMessage); } } public API createNewAPIVersion(String existingApiId, String newVersion, Boolean isDefaultVersion, String organization) throws APIManagementException { API existingAPI = getAPIbyUUID(existingApiId, organization); if (existingAPI == null) { throw new APIMgtResourceNotFoundException("API not found for id " + existingApiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, existingApiId)); } if (newVersion.equals(existingAPI.getId().getVersion())) { throw new APIMgtResourceAlreadyExistsException( "Version " + newVersion + " exists for api " + existingAPI.getId().getApiName()); } existingAPI.setOrganization(organization); APIIdentifier existingAPIId = existingAPI.getId(); String existingAPICreatedTime = existingAPI.getCreatedTime(); String existingAPIStatus = existingAPI.getStatus(); boolean isExsitingAPIdefaultVersion = existingAPI.isDefaultVersion(); String existingContext = existingAPI.getContext(); APIIdentifier newApiId = new APIIdentifier(existingAPI.getId().getProviderName(), existingAPI.getId().getApiName(), newVersion); existingAPI.setUuid(null); existingAPI.setId(newApiId); existingAPI.setStatus(APIConstants.CREATED); existingAPI.setDefaultVersion(isDefaultVersion); // We need to change the context by setting the new version // This is a change that is coming with the context version strategy String existingAPIContextTemplate = existingAPI.getContextTemplate(); existingAPI.setContext(existingAPIContextTemplate.replace("{version}", newVersion)); API newAPI = addAPI(existingAPI); String newAPIId = newAPI.getUuid(); // copy docs List<Documentation> existingDocs = getAllDocumentation(existingApiId, organization); if (existingDocs != null) { for (Documentation documentation : existingDocs) { Documentation 
newDoc = addDocumentation(newAPIId, documentation, organization); DocumentationContent content = getDocumentationContent(existingApiId, documentation.getId(), organization); // TODO see whether we can optimize this if (content != null) { addDocumentationContent(newAPIId, newDoc.getId(), organization, content); } } } // copy icon ResourceFile icon = getIcon(existingApiId, organization); if (icon != null) { setThumbnailToAPI(newAPIId, icon, organization); } // copy sequences List<Mediation> mediationPolicies = getAllApiSpecificMediationPolicies(existingApiId, organization); if (mediationPolicies != null) { for (Mediation mediation : mediationPolicies) { Mediation policy = getApiSpecificMediationPolicyByPolicyId(existingApiId, mediation.getUuid(), organization); addApiSpecificMediationPolicy(newAPIId, policy, organization); } } // copy wsdl if (existingAPI.getWsdlUrl() != null) { ResourceFile wsdl = getWSDL(existingApiId, organization); if (wsdl != null) { addWSDLResource(newAPIId, wsdl, null, organization); } } // copy graphql definition String graphQLSchema = getGraphqlSchemaDefinition(existingApiId, organization); if(graphQLSchema != null) { saveGraphqlSchemaDefinition(newAPIId, graphQLSchema, organization); } // update old api // revert back to old values before update. 
existingAPI.setUuid(existingApiId); existingAPI.setStatus(existingAPIStatus); existingAPI.setId(existingAPIId); existingAPI.setContext(existingContext); existingAPI.setCreatedTime(existingAPICreatedTime); // update existing api with setLatest to false existingAPI.setLatest(false); if (isDefaultVersion) { existingAPI.setDefaultVersion(false); } else { existingAPI.setDefaultVersion(isExsitingAPIdefaultVersion); } try { apiPersistenceInstance.updateAPI(new Organization(organization), APIMapper.INSTANCE.toPublisherApi(existingAPI)); } catch (APIPersistenceException e) { throw new APIManagementException("Error while updating API details", e); } return getAPIbyUUID(newAPIId, organization); } public String retrieveServiceKeyByApiId(int apiId, int tenantId) throws APIManagementException { return apiMgtDAO.retrieveServiceKeyByApiId(apiId, tenantId); } private void copySequencesToNewVersion(API api, String newVersion, String pathFlow) throws Exception { String seqFilePath = APIUtil.getSequencePath(api.getId(), pathFlow); if (registry.resourceExists(seqFilePath)) { APIIdentifier newApiId = new APIIdentifier(api.getId().getProviderName(), api.getId().getApiName(), newVersion); String seqNewFilePath = APIUtil.getSequencePath(newApiId, pathFlow); org.wso2.carbon.registry.api.Collection seqCollection = (org.wso2.carbon.registry.api.Collection) registry.get(seqFilePath); if (seqCollection != null) { String[] seqChildPaths = seqCollection.getChildren(); for (String seqChildPath : seqChildPaths) { Resource sequence = registry.get(seqChildPath); ResourceFile seqFile = new ResourceFile(sequence.getContentStream(), sequence.getMediaType()); OMElement seqElement = APIUtil.buildOMElement(sequence.getContentStream()); String seqFileName = seqElement.getAttributeValue(new QName("name")); addResourceFile(api.getId(), seqNewFilePath + seqFileName, seqFile); } } } } /** * Removes a given documentation * * @param apiId APIIdentifier * @param docType the type of the documentation * @param 
docName name of the document * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to remove documentation */ public void removeDocumentation(APIIdentifier apiId, String docName, String docType, String orgId) throws APIManagementException { String docPath = APIUtil.getAPIDocPath(apiId) + docName; try { String apiArtifactId = registry.get(docPath).getUUID(); GenericArtifactManager artifactManager = APIUtil .getArtifactManager(registry, APIConstants.DOCUMENTATION_KEY); if (artifactManager == null) { String errorMessage = "Failed to retrieve artifact manager when deleting documentation of API " + apiId + " document type " + docType + " document name " + docName; log.error(errorMessage); throw new APIManagementException(errorMessage); } GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId); String docFilePath = artifact.getAttribute(APIConstants.DOC_FILE_PATH); if (docFilePath != null) { File tempFile = new File(docFilePath); String fileName = tempFile.getName(); docFilePath = APIUtil.getDocumentationFilePath(apiId, fileName); if (registry.resourceExists(docFilePath)) { registry.delete(docFilePath); } } } catch (RegistryException e) { handleException("Failed to delete documentation", e); } } /** * @param id Identifier * @param docId UUID of the doc * @throws APIManagementException if failed to remove documentation */ public void removeDocumentation(Identifier id, String docId, String orgId) throws APIManagementException { String uuid; if (id.getUUID() == null) { uuid = id.getUUID(); } else { uuid = apiMgtDAO.getUUIDFromIdentifier(id.getProviderName(), id.getName(), id.getVersion(), orgId); } removeDocumentation(uuid, docId, orgId); } @Override public void removeDocumentation(String apiId, String docId, String organization) throws APIManagementException { try { apiPersistenceInstance.deleteDocumentation(new Organization(organization), apiId, docId); } catch (DocumentationPersistenceException e) { throw new 
APIManagementException("Error while deleting the document " + docId); } } /** * This method used to save the documentation content * * @param api, API * @param documentationName, name of the inline documentation * @param text, content of the inline documentation * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to add the document as a resource to registry */ public void addDocumentationContent(API api, String documentationName, String text) throws APIManagementException { APIIdentifier identifier = api.getId(); String documentationPath = APIUtil.getAPIDocPath(identifier) + documentationName; String contentPath = APIUtil.getAPIDocPath(identifier) + APIConstants.INLINE_DOCUMENT_CONTENT_DIR + RegistryConstants.PATH_SEPARATOR + documentationName; boolean isTenantFlowStarted = false; try { if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } Resource docResource = registry.get(documentationPath); GenericArtifactManager artifactManager = new GenericArtifactManager(registry, APIConstants.DOCUMENTATION_KEY); GenericArtifact docArtifact = artifactManager.getGenericArtifact(docResource.getUUID()); Documentation doc = APIUtil.getDocumentation(docArtifact); Resource docContent; if (!registry.resourceExists(contentPath)) { docContent = registry.newResource(); } else { docContent = registry.get(contentPath); } /* This is a temporary fix for doc content replace issue. 
We need to add * separate methods to add inline content resource in document update */ if (!APIConstants.NO_CONTENT_UPDATE.equals(text)) { docContent.setContent(text); } docContent.setMediaType(APIConstants.DOCUMENTATION_INLINE_CONTENT_TYPE); registry.put(contentPath, docContent); String apiPath = APIUtil.getAPIPath(identifier); String[] authorizedRoles = getAuthorizedRoles(apiPath); String docVisibility = doc.getVisibility().name(); String visibility = api.getVisibility(); if (docVisibility != null) { if (APIConstants.DOC_SHARED_VISIBILITY.equalsIgnoreCase(docVisibility)) { authorizedRoles = null; visibility = APIConstants.DOC_SHARED_VISIBILITY; } else if (APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(docVisibility)) { authorizedRoles = null; visibility = APIConstants.DOC_OWNER_VISIBILITY; } } APIUtil.setResourcePermissions(api.getId().getProviderName(),visibility, authorizedRoles,contentPath, registry); } catch (RegistryException e) { String msg = "Failed to add the documentation content of : " + documentationName + " of API :" + identifier.getApiName(); handleException(msg, e); } catch (UserStoreException e) { String msg = "Failed to add the documentation content of : " + documentationName + " of API :" + identifier.getApiName(); handleException(msg, e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } } /** * Updates a visibility of the documentation * * @param api API * @param documentation Documentation * @throws APIManagementException if failed to update visibility */ private void updateDocVisibility(API api, Documentation documentation) throws APIManagementException { try { GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry,APIConstants.DOCUMENTATION_KEY); if (artifactManager == null) { String errorMessage = "Artifact manager is null when updating documentation of API " + api.getId().getApiName(); throw new APIManagementException(errorMessage); } GenericArtifact artifact = 
artifactManager.getGenericArtifact(documentation.getId()); String[] authorizedRoles = new String[0]; String visibleRolesList = api.getVisibleRoles(); if (visibleRolesList != null) { authorizedRoles = visibleRolesList.split(","); } int tenantId; String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(api.getId().getProviderName())); try { tenantId = getTenantId(tenantDomain); GenericArtifact updateApiArtifact = APIUtil.createDocArtifactContent(artifact, api.getId(), documentation); artifactManager.updateGenericArtifact(updateApiArtifact); APIUtil.clearResourcePermissions(artifact.getPath(), api.getId(), tenantId); APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), authorizedRoles, artifact.getPath(), registry); String docType = artifact.getAttribute(APIConstants.DOC_SOURCE_TYPE); if (APIConstants.IMPLEMENTATION_TYPE_INLINE.equals(docType) || APIConstants.IMPLEMENTATION_TYPE_MARKDOWN.equals(docType)) { String docContentPath = APIUtil.getAPIDocPath(api.getId()) + APIConstants .INLINE_DOCUMENT_CONTENT_DIR + RegistryConstants.PATH_SEPARATOR + artifact.getAttribute(APIConstants.DOC_NAME); APIUtil.clearResourcePermissions(docContentPath, api.getId(), tenantId); APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), authorizedRoles, docContentPath, registry); } else if (APIConstants.IMPLEMENTATION_TYPE_FILE.equals(docType)) { String docFilePath = APIUtil.getDocumentationFilePath(api.getId(), artifact.getAttribute(APIConstants.DOC_FILE_PATH).split( APIConstants.DOCUMENT_FILE_DIR + RegistryConstants.PATH_SEPARATOR)[1]); APIUtil.clearResourcePermissions(docFilePath, api.getId(), tenantId); APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), authorizedRoles, docFilePath, registry); } } catch (UserStoreException e) { throw new APIManagementException("Error in retrieving Tenant Information while updating the " + "visibility of documentations for the API :" + 
api.getId().getApiName(), e); } } catch (RegistryException e) { handleException("Failed to update visibility of documentation" + api.getId().getApiName(), e); } } /** * Updates a given documentation * * @param apiId id of the document * @param documentation Documentation * @param organization identifier of the organization * @return updated documentation Documentation * @throws APIManagementException if failed to update docs */ public Documentation updateDocumentation(String apiId, Documentation documentation, String organization) throws APIManagementException { if (documentation != null) { org.wso2.carbon.apimgt.persistence.dto.Documentation mappedDoc = DocumentMapper.INSTANCE .toDocumentation(documentation); try { org.wso2.carbon.apimgt.persistence.dto.Documentation updatedDoc = apiPersistenceInstance .updateDocumentation(new Organization(organization), apiId, mappedDoc); if (updatedDoc != null) { return DocumentMapper.INSTANCE.toDocumentation(updatedDoc); } } catch (DocumentationPersistenceException e) { handleException("Failed to add documentation", e); } } return null; } /** * Copies current Documentation into another version of the same API. * * @param toVersion Version to which Documentation should be copied. 
* @param apiId id of the APIIdentifier * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to copy docs */ public void copyAllDocumentation(APIIdentifier apiId, String toVersion) throws APIManagementException { String oldVersion = APIUtil.getAPIDocPath(apiId); String newVersion = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + apiId.getProviderName() + RegistryConstants.PATH_SEPARATOR + apiId.getApiName() + RegistryConstants.PATH_SEPARATOR + toVersion + RegistryConstants.PATH_SEPARATOR + APIConstants.DOC_DIR; try { Resource resource = registry.get(oldVersion); if (resource instanceof org.wso2.carbon.registry.core.Collection) { String[] docsPaths = ((org.wso2.carbon.registry.core.Collection) resource).getChildren(); for (String docPath : docsPaths) { registry.copy(docPath, newVersion); } } } catch (RegistryException e) { handleException("Failed to copy docs to new version : " + newVersion, e); } } /** * Create an Api * * @param api API * @throws APIManagementException if failed to create API */ protected String createAPI(API api) throws APIManagementException { GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); if (artifactManager == null) { String errorMessage = "Failed to retrieve artifact manager when creating API " + api.getId().getApiName(); log.error(errorMessage); throw new APIManagementException(errorMessage); } if (api.isEndpointSecured() && StringUtils.isEmpty(api.getEndpointUTPassword())) { String errorMessage = "Empty password is given for endpointSecurity when creating API " + api.getId().getApiName(); throw new APIManagementException(errorMessage); } //Validate Transports validateAndSetTransports(api); validateAndSetAPISecurity(api); boolean transactionCommitted = false; String apiUUID = null; try { registry.beginTransaction(); GenericArtifact genericArtifact = artifactManager.newGovernanceArtifact(new QName(api.getId().getApiName())); if (genericArtifact == null) { 
String errorMessage = "Generic artifact is null when creating API " + api.getId().getApiName(); log.error(errorMessage); throw new APIManagementException(errorMessage); } GenericArtifact artifact = APIUtil.createAPIArtifactContent(genericArtifact, api); artifactManager.addGenericArtifact(artifact); //Attach the API lifecycle artifact.attachLifecycle(APIConstants.API_LIFE_CYCLE); String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId()); String providerPath = APIUtil.getAPIProviderPath(api.getId()); //provider ------provides----> API registry.addAssociation(providerPath, artifactPath, APIConstants.PROVIDER_ASSOCIATION); Set<String> tagSet = api.getTags(); if (tagSet != null) { for (String tag : tagSet) { registry.applyTag(artifactPath, tag); } } if (APIUtil.isValidWSDLURL(api.getWsdlUrl(), false)) { String path = APIUtil.createWSDL(registry, api); updateWSDLUriInAPIArtifact(path, artifactManager, artifact, artifactPath); } if (api.getWsdlResource() != null) { String path = APIUtil.saveWSDLResource(registry, api); updateWSDLUriInAPIArtifact(path, artifactManager, artifact, artifactPath); } //write API Status to a separate property. 
This is done to support querying APIs using custom query (SQL) //to gain performance String apiStatus = api.getStatus(); saveAPIStatus(artifactPath, apiStatus); String visibleRolesList = api.getVisibleRoles(); String[] visibleRoles = new String[0]; if (visibleRolesList != null) { visibleRoles = visibleRolesList.split(","); } String publisherAccessControlRoles = api.getAccessControlRoles(); updateRegistryResources(artifactPath, publisherAccessControlRoles, api.getAccessControl(), api.getAdditionalProperties()); APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles, artifactPath, registry); registry.commitTransaction(); transactionCommitted = true; if (log.isDebugEnabled()) { String logMessage = "API Name: " + api.getId().getApiName() + ", API Version " + api.getId().getVersion() + " created"; log.debug(logMessage); } apiUUID = artifact.getId(); } catch (RegistryException e) { try { registry.rollbackTransaction(); } catch (RegistryException re) { // Throwing an error here would mask the original exception log.error("Error while rolling back the transaction for API: " + api.getId().getApiName(), re); } handleException("Error while performing registry transaction operation", e); } catch (APIManagementException e) { handleException("Error while creating API", e); } finally { try { if (!transactionCommitted) { registry.rollbackTransaction(); } } catch (RegistryException ex) { handleException("Error while rolling back the transaction for API: " + api.getId().getApiName(), ex); } } return apiUUID; } /** * Update WSDLUri in the API Registry artifact * * @param wsdlPath WSDL Registry Path * @param artifactManager Artifact Manager * @param artifact API Artifact * @param artifactPath API Artifact path * @throws RegistryException when error occurred while updating WSDL path */ private void updateWSDLUriInAPIArtifact(String wsdlPath, GenericArtifactManager artifactManager, GenericArtifact artifact, String artifactPath) throws 
RegistryException { if (wsdlPath != null) { artifact.setAttribute(APIConstants.API_OVERVIEW_WSDL, wsdlPath); artifactManager.updateGenericArtifact(artifact); //update the artifact } } /** * Create a documentation * * @param api API * @param documentation Documentation * @throws APIManagementException if failed to add documentation */ private void createDocumentation(API api, Documentation documentation) throws APIManagementException { try { APIIdentifier apiId = api.getId(); GenericArtifactManager artifactManager = new GenericArtifactManager(registry, APIConstants.DOCUMENTATION_KEY); GenericArtifact artifact = artifactManager.newGovernanceArtifact(new QName(documentation.getName())); artifactManager.addGenericArtifact(APIUtil.createDocArtifactContent(artifact, apiId, documentation)); String apiPath = APIUtil.getAPIPath(apiId); String docVisibility = documentation.getVisibility().name(); String[] authorizedRoles = getAuthorizedRoles(apiPath); String visibility = api.getVisibility(); if (docVisibility != null) { if (APIConstants.DOC_SHARED_VISIBILITY.equalsIgnoreCase(docVisibility)) { authorizedRoles = null; visibility = APIConstants.DOC_SHARED_VISIBILITY; } else if (APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(docVisibility)) { authorizedRoles = null; visibility = APIConstants.DOC_OWNER_VISIBILITY; } } APIUtil.setResourcePermissions(api.getId().getProviderName(),visibility, authorizedRoles, artifact .getPath(), registry); String docFilePath = artifact.getAttribute(APIConstants.DOC_FILE_PATH); if (docFilePath != null && !"".equals(docFilePath)) { //The docFilePatch comes as /t/tenanatdoman/registry/resource/_system/governance/apimgt/applicationdata.. //We need to remove the /t/tenanatdoman/registry/resource/_system/governance section to set permissions. 
int startIndex = docFilePath.indexOf(APIConstants.GOVERNANCE) + (APIConstants.GOVERNANCE).length(); String filePath = docFilePath.substring(startIndex, docFilePath.length()); APIUtil.setResourcePermissions(api.getId().getProviderName(),visibility, authorizedRoles, filePath, registry); } documentation.setId(artifact.getId()); } catch (RegistryException e) { handleException("Failed to add documentation", e); } catch (UserStoreException e) { handleException("Failed to add documentation", e); } } @Override public Documentation addDocumentation(String uuid, Documentation documentation, String organization) throws APIManagementException { if (documentation != null) { org.wso2.carbon.apimgt.persistence.dto.Documentation mappedDoc = DocumentMapper.INSTANCE .toDocumentation(documentation); try { org.wso2.carbon.apimgt.persistence.dto.Documentation addedDoc = apiPersistenceInstance.addDocumentation( new Organization(organization), uuid, mappedDoc); if (addedDoc != null) { return DocumentMapper.INSTANCE.toDocumentation(addedDoc); } } catch (DocumentationPersistenceException e) { handleException("Failed to add documentation", e); } } return null; } @Override public boolean isDocumentationExist(String uuid, String docName, String organization) throws APIManagementException { boolean exist = false; UserContext ctx = null; try { DocumentSearchResult result = apiPersistenceInstance.searchDocumentation(new Organization(organization), uuid, 0, 0, "name:" + docName, ctx); if (result != null && result.getDocumentationList() != null && !result.getDocumentationList().isEmpty()) { String returnDocName = result.getDocumentationList().get(0).getName(); if (returnDocName != null && returnDocName.equals(docName)) { exist = true; } } } catch (DocumentationPersistenceException e) { handleException("Failed to search documentation for name " + docName, e); } return exist; } private String[] getAuthorizedRoles(String artifactPath) throws UserStoreException { String resourcePath = 
RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + artifactPath); if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { int tenantId = ServiceReferenceHolder.getInstance().getRealmService(). getTenantManager().getTenantId(tenantDomain); AuthorizationManager authManager = ServiceReferenceHolder.getInstance().getRealmService(). getTenantUserRealm(tenantId).getAuthorizationManager(); return authManager.getAllowedRolesForResource(resourcePath, ActionConstants.GET); } else { RegistryAuthorizationManager authorizationManager = new RegistryAuthorizationManager (ServiceReferenceHolder.getUserRealm()); return authorizationManager.getAllowedRolesForResource(resourcePath, ActionConstants.GET); } } /** * Returns the details of all the life-cycle changes done per api * * @param apiId API Identifier * @param organization Organization * @return List of lifecycle events per given api * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to get Lifecycle Events */ public List<LifeCycleEvent> getLifeCycleEvents(APIIdentifier apiId, String organization) throws APIManagementException { return apiMgtDAO.getLifeCycleEvents(apiId, organization); } /** * Update the subscription status * * @param apiId API Identifier * @param subStatus Subscription Status * @param appId Application Id * @param organization Organization * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status */ public void updateSubscription(APIIdentifier apiId, String subStatus, int appId, String organization) throws APIManagementException { apiMgtDAO.updateSubscription(apiId, subStatus, appId, organization); } /** * This method is used to update the subscription * * @param subscribedAPI subscribedAPI object that represents the new subscription detals * @throws APIManagementException if failed to update subscription */ 
public void updateSubscription(SubscribedAPI subscribedAPI) throws APIManagementException { apiMgtDAO.updateSubscription(subscribedAPI); subscribedAPI = apiMgtDAO.getSubscriptionByUUID(subscribedAPI.getUUID()); Identifier identifier = subscribedAPI.getApiId() != null ? subscribedAPI.getApiId() : subscribedAPI.getProductId(); String tenantDomain = MultitenantUtils .getTenantDomain(APIUtil.replaceEmailDomainBack(identifier.getProviderName())); SubscriptionEvent subscriptionEvent = new SubscriptionEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.SUBSCRIPTIONS_UPDATE.name(), tenantId, tenantDomain, subscribedAPI.getSubscriptionId(), subscribedAPI.getUUID(), identifier.getId(), identifier.getUUID(), subscribedAPI.getApplication().getId(), subscribedAPI.getApplication().getUUID(), subscribedAPI.getTier().getName(), subscribedAPI.getSubStatus()); APIUtil.sendNotification(subscriptionEvent, APIConstants.NotifierType.SUBSCRIPTIONS.name()); } public void deleteAPI(String apiUuid, String organization) throws APIManagementException { boolean isError = false; int apiId = -1; API api = null; // get api object by uuid try { api = getAPIbyUUID(apiUuid, organization); } catch (APIManagementException e) { log.error("Error while getting API by uuid for deleting API " + apiUuid + " on organization " + organization); log.debug("Following steps will be skipped while deleting API " + apiUuid + "on organization " + organization + " due to api being null. " + "deleting Resource Registration from key managers, deleting on external API stores, " + "event publishing to gateways, logging audit message, extracting API details for " + "the recommendation system. 
" ); isError = true; } // get api id from db try { apiId = apiMgtDAO.getAPIID(apiUuid); } catch (APIManagementException e) { log.error("Error while getting API ID from DB for deleting API " + apiUuid + " on organization " + organization, e); log.debug("Following steps will be skipped while deleting the API " + apiUuid + " on organization " + organization + "due to api id being null. cleanup workflow tasks of the API, " + "delete event publishing to gateways"); isError = true; } // DB delete operations if (!isError && api != null) { try { deleteAPIRevisions(apiUuid, organization); deleteAPIFromDB(api); if (log.isDebugEnabled()) { String logMessage = "API Name: " + api.getId().getApiName() + ", API Version " + api.getId().getVersion() + " successfully removed from the database."; log.debug(logMessage); } } catch (APIManagementException e) { log.error("Error while executing API delete operations on DB for API " + apiUuid + " on organization " + organization, e); isError = true; } } // Deleting Resource Registration from key managers if (api != null && api.getId() != null && api.getId().toString() != null) { Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain); for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) { KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager(); if (keyManager != null) { try { keyManager.deleteRegisteredResourceByAPIId(api.getId().toString()); log.debug("API " + apiUuid + " on organization " + organization + " has successfully removed from the Key Manager " + keyManagerDtoEntry.getKey()); } catch (APIManagementException e) { log.error("Error while deleting Resource Registration for API " + apiUuid + " on organization " + organization + " in Key Manager " + keyManagerDtoEntry.getKey(), e); } } } } try { GatewayArtifactsMgtDAO.getInstance().deleteGatewayArtifacts(apiUuid); log.debug("API " + apiUuid + " on organization " + organization + " has 
successfully removed from the gateway artifacts."); } catch (APIManagementException e) { log.error("Error while executing API delete operation on gateway artifacts for API " + apiUuid, e); isError = true; } try { apiPersistenceInstance.deleteAPI(new Organization(organization), apiUuid); log.debug("API " + apiUuid + " on organization " + organization + " has successfully removed from the persistence instance."); } catch (APIPersistenceException e) { log.error("Error while executing API delete operation on persistence instance for API " + apiUuid + " on organization " + organization, e); isError = true; } // Deleting on external API stores if (api != null) { // gatewayType check is required when API Management is deployed on // other servers to avoid synapse //Check if there are already published external APIStores.If yes,removing APIs from them. Set<APIStore> apiStoreSet; try { apiStoreSet = getPublishedExternalAPIStores(apiUuid); WSO2APIPublisher wso2APIPublisher = new WSO2APIPublisher(); if (apiStoreSet != null && !apiStoreSet.isEmpty()) { for (APIStore store : apiStoreSet) { wso2APIPublisher.deleteFromStore(api.getId(), APIUtil.getExternalAPIStore(store.getName(), tenantId)); } } } catch (APIManagementException e) { log.error("Error while executing API delete operation on external API stores for API " + apiUuid + " on organization " + organization, e); isError = true; } } if (apiId != -1) { try { cleanUpPendingAPIStateChangeTask(apiId); } catch (WorkflowException | APIManagementException e) { log.error("Error while executing API delete operation on cleanup workflow tasks for API " + apiUuid + " on organization " + organization, e); isError = true; } } // Delete event publishing to gateways if (api != null && apiId != -1) { APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.API_DELETE.name(), tenantId, tenantDomain, api.getId().getApiName(), apiId, api.getUuid(), api.getId().getVersion(), 
api.getType(), api.getContext(), APIUtil.replaceEmailDomainBack(api.getId().getProviderName()), api.getStatus()); APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name()); } else { log.debug("Event has not published to gateways due to API id has failed to retrieve from DB for API " + apiUuid + " on organization " + organization); } // Logging audit message for API delete if (api != null) { JSONObject apiLogObject = new JSONObject(); apiLogObject.put(APIConstants.AuditLogConstants.NAME, api.getId().getApiName()); apiLogObject.put(APIConstants.AuditLogConstants.VERSION, api.getId().getVersion()); apiLogObject.put(APIConstants.AuditLogConstants.PROVIDER, api.getId().getProviderName()); APIUtil.logAuditMessage(APIConstants.AuditLogConstants.API, apiLogObject.toString(), APIConstants.AuditLogConstants.DELETED, this.username); } // Extracting API details for the recommendation system if (api != null && recommendationEnvironment != null) { RecommenderEventPublisher extractor = new RecommenderDetailsExtractor(api, tenantDomain, APIConstants.DELETE_API); Thread recommendationThread = new Thread(extractor); recommendationThread.start(); } // if one of the above has failed throw an error if (isError) { throw new APIManagementException("Error while deleting the API " + apiUuid + " on organization " + organization); } } /** * Deletes API from the database and delete local scopes and resource scope attachments from KM. 
* * @param api API to delete * @throws APIManagementException if fails to delete the API */ private void deleteAPIFromDB(API api) throws APIManagementException { APIIdentifier apiIdentifier = api.getId(); int tenantId = APIUtil.getTenantId(APIUtil.replaceEmailDomainBack(apiIdentifier.getProviderName())); String tenantDomain = APIUtil.getTenantDomainFromTenantId(tenantId); // Get local scopes for the given API which are not already assigned for different versions of the same API Set<String> localScopeKeysToDelete = apiMgtDAO.getUnversionedLocalScopeKeysForAPI(api.getUuid(), tenantId); // Get the URI Templates for the given API to detach the resources scopes from Set<URITemplate> uriTemplates = apiMgtDAO.getURITemplatesOfAPI(api.getUuid()); // Detach all the resource scopes from the API resources in KM Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain); for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) { KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager(); if (keyManager != null) { try { keyManager.detachResourceScopes(api, uriTemplates); if (log.isDebugEnabled()) { log.debug("Resource scopes are successfully detached for the API : " + apiIdentifier + " from Key Manager :" + keyManagerDtoEntry.getKey() + "."); } // remove the local scopes from the KM for (String localScope : localScopeKeysToDelete) { keyManager.deleteScope(localScope); } if (log.isDebugEnabled()) { log.debug("Local scopes are successfully deleted for the API : " + apiIdentifier + " from Key Manager : " + keyManagerDtoEntry.getKey() + "."); } } catch (APIManagementException e) { log.error("Error while Detach and Delete Scope from Key Manager " + keyManagerDtoEntry.getKey(), e); } } } deleteScopes(localScopeKeysToDelete, tenantId); apiMgtDAO.deleteAPI(api.getUuid()); if (log.isDebugEnabled()) { log.debug("API : " + apiIdentifier + " is successfully deleted from the database and Key Manager."); } } 
// Undeploys (where deployed) and deletes every revision of the given API.
private void deleteAPIRevisions(String apiUUID, String organization) throws APIManagementException {
    List<APIRevision> apiRevisionList = apiMgtDAO.getRevisionsListByAPIUUID(apiUUID);
    for (APIRevision apiRevision : apiRevisionList) {
        if (apiRevision.getApiRevisionDeploymentList().size() != 0) {
            undeployAPIRevisionDeployment(apiUUID, apiRevision.getRevisionUUID(),
                    apiRevision.getApiRevisionDeploymentList(), organization);
        }
        deleteAPIRevision(apiUUID, apiRevision.getRevisionUUID(), organization);
    }
}

// Undeploys (where deployed) and deletes every revision of the given API Product.
private void deleteAPIProductRevisions(String apiProductUUID, String organization) throws APIManagementException {
    List<APIRevision> apiRevisionList = apiMgtDAO.getRevisionsListByAPIUUID(apiProductUUID);
    for (APIRevision apiRevision : apiRevisionList) {
        if (apiRevision.getApiRevisionDeploymentList().size() != 0) {
            undeployAPIProductRevisionDeployment(apiProductUUID, apiRevision.getRevisionUUID(),
                    apiRevision.getApiRevisionDeploymentList());
        }
        deleteAPIProductRevision(apiProductUUID, apiRevision.getRevisionUUID(), organization);
    }
}

// NOTE(review): searchType is accepted but not forwarded to searchAPIDoc — confirm whether that is intentional.
public Map<Documentation, API> searchAPIsByDoc(String searchTerm, String searchType) throws APIManagementException {
    return searchAPIDoc(registry, tenantId, username, searchTerm);
}

/**
 * Search APIs based on given search term
 *
 * @param searchTerm term to match (matched case-insensitively as a substring)
 * @param searchType attribute to match against: Name, Provider, Version, Context, Status or tier description
 * @param providerId when non-null, restricts the search to that provider's APIs
 * @throws APIManagementException
 */
@Deprecated
public List<API> searchAPIs(String searchTerm, String searchType, String providerId) throws APIManagementException {
    List<API> foundApiList = new ArrayList<API>();
    // Case-insensitive substring match, padded with word/dot/pipe/hyphen characters.
    String regex = "(?i)[\\w.|-]*" + searchTerm.trim() + "[\\w.|-]*";
    Pattern pattern;
    Matcher matcher;
    String apiConstant = null;
    try {
        if (providerId != null) {
            List<API> apiList = getAPIsByProvider(providerId);
            if (apiList == null || apiList.isEmpty()) {
                return apiList;
            }
            pattern = Pattern.compile(regex);
            for (API api : apiList) {
                if ("Name".equalsIgnoreCase(searchType)) {
                    apiConstant = api.getId().getApiName();
                } else if
("Provider".equalsIgnoreCase(searchType)) {
                    apiConstant = api.getId().getProviderName();
                } else if ("Version".equalsIgnoreCase(searchType)) {
                    apiConstant = api.getId().getVersion();
                } else if ("Context".equalsIgnoreCase(searchType)) {
                    apiConstant = api.getContext();
                } else if ("Status".equalsIgnoreCase(searchType)) {
                    apiConstant = api.getStatus();
                } else if (APIConstants.THROTTLE_TIER_DESCRIPTION_ATTRIBUTE.equalsIgnoreCase(searchType)) {
                    apiConstant = api.getDescription();
                }
                if (apiConstant != null) {
                    matcher = pattern.matcher(apiConstant);
                    if (matcher.find()) {
                        foundApiList.add(api);
                    }
                }
            }
        } else {
            // No provider given: delegate to the registry-wide search.
            foundApiList = searchAPIs(searchTerm, searchType);
        }
    } catch (APIManagementException e) {
        handleException("Failed to search APIs with type", e);
    }
    Collections.sort(foundApiList, new APINameComparator());
    return foundApiList;
}

/**
 * Search APIs
 *
 * @param searchTerm term to match (matched case-insensitively as a substring)
 * @param searchType attribute to match: Name, Version, Context, Provider, Status, Subcontext or tier description
 * @return matching APIs (possibly empty, never null)
 * @throws APIManagementException
 */
@Deprecated
private List<API> searchAPIs(String searchTerm, String searchType) throws APIManagementException {
    List<API> apiList = new ArrayList<API>();
    Pattern pattern;
    Matcher matcher;
    String searchCriteria = APIConstants.API_OVERVIEW_NAME;
    boolean isTenantFlowStarted = false;
    String userName = this.username;
    try {
        // Run inside the tenant's carbon context when this is not the super tenant.
        if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
            isTenantFlowStarted = true;
            PrivilegedCarbonContext.startTenantFlow();
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        }
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(userName);
        GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
        if (artifactManager != null) {
            if ("Name".equalsIgnoreCase(searchType)) {
                searchCriteria = APIConstants.API_OVERVIEW_NAME;
            } else if ("Version".equalsIgnoreCase(searchType)) {
                searchCriteria = APIConstants.API_OVERVIEW_VERSION;
            } else if ("Context".equalsIgnoreCase(searchType)) {
                searchCriteria = APIConstants.API_OVERVIEW_CONTEXT;
            } else if (APIConstants.THROTTLE_TIER_DESCRIPTION_ATTRIBUTE.equalsIgnoreCase(searchType)) {
                searchCriteria = APIConstants.API_OVERVIEW_DESCRIPTION;
            } else if ("Provider".equalsIgnoreCase(searchType)) {
                searchCriteria = APIConstants.API_OVERVIEW_PROVIDER;
                // Registry stores provider emails with '@' encoded as '-AT-'.
                searchTerm = searchTerm.replaceAll("@", "-AT-");
            } else if ("Status".equalsIgnoreCase(searchType)) {
                searchCriteria = APIConstants.API_OVERVIEW_STATUS;
            }
            String regex = "(?i)[\\w.|-]*" + searchTerm.trim() + "[\\w.|-]*";
            pattern = Pattern.compile(regex);
            if ("Subcontext".equalsIgnoreCase(searchType)) {
                // Subcontext search matches against each API's URI templates instead of an artifact attribute.
                List<API> allAPIs = getAllAPIs();
                for (API api : allAPIs) {
                    Set<URITemplate> urls = api.getUriTemplates();
                    for (URITemplate url : urls) {
                        matcher = pattern.matcher(url.getUriTemplate());
                        if (matcher.find()) {
                            apiList.add(api);
                            break;
                        }
                    }
                }
            } else {
                GenericArtifact[] genericArtifacts = artifactManager.getAllGenericArtifacts();
                if (genericArtifacts == null || genericArtifacts.length == 0) {
                    return apiList;
                }
                for (GenericArtifact artifact : genericArtifacts) {
                    String value = artifact.getAttribute(searchCriteria);
                    if (value != null) {
                        matcher = pattern.matcher(value);
                        if (matcher.find()) {
                            API resultAPI = getAPI(artifact);
                            if (resultAPI != null) {
                                apiList.add(resultAPI);
                            }
                        }
                    }
                }
            }
        } else {
            String errorMessage = "Failed to retrieve artifact manager when searching APIs for term " + searchTerm
                    + " in tenant domain " + tenantDomain;
            log.error(errorMessage);
            throw new APIManagementException(errorMessage);
        }
    } catch (RegistryException e) {
        handleException("Failed to search APIs with type", e);
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    return apiList;
}

/**
 * Update the Tier Permissions
 *
 * @param tierName       Tier Name
 * @param permissionType Permission Type
 * @param roles          Roles
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status
 */
public void updateTierPermissions(String tierName, String permissionType,
String roles) throws APIManagementException {
    apiMgtDAO.updateTierPermissions(tierName, permissionType, roles, tenantId);
}

@Override
public void deleteTierPermissions(String tierName) throws APIManagementException {
    apiMgtDAO.deleteThrottlingPermissions(tierName, tenantId);
}

@Override
public Set<TierPermissionDTO> getTierPermissions() throws APIManagementException {
    return apiMgtDAO.getTierPermissions(tenantId);
}

@Override
public TierPermissionDTO getThrottleTierPermission(String tierName) throws APIManagementException {
    return apiMgtDAO.getThrottleTierPermission(tierName, tenantId);
}

/**
 * Update the Tier Permissions
 *
 * @param tierName       Tier Name
 * @param permissionType Permission Type
 * @param roles          Roles
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status
 */
public void updateThrottleTierPermissions(String tierName, String permissionType, String roles)
        throws APIManagementException {
    apiMgtDAO.updateThrottleTierPermissions(tierName, permissionType, roles, tenantId);
}

@Override
public Set<TierPermissionDTO> getThrottleTierPermissions() throws APIManagementException {
    return apiMgtDAO.getThrottleTierPermissions(tenantId);
}

/**
 * Publish API to external stores given by external store Ids
 *
 * @param api              API which need to published
 * @param externalStoreIds APIStore Ids which need to publish API
 * @throws APIManagementException If failed to publish to external stores
 */
@Override
public boolean publishToExternalAPIStores(API api, List<String> externalStoreIds) throws APIManagementException {
    Set<APIStore> inputStores = new HashSet<>();
    boolean apiOlderVersionExist = false;
    APIIdentifier apiIdentifier = api.getId();
    // Resolve each configured store id; an unknown id aborts the whole operation.
    for (String store : externalStoreIds) {
        if (StringUtils.isNotEmpty(store)) {
            APIStore inputStore = APIUtil.getExternalAPIStore(store,
                    APIUtil.getTenantIdFromTenantDomain(tenantDomain));
            if (inputStore == null) {
                String errorMessage = "Error while publishing to external stores.
Invalid External Store Id: " + store;
                log.error(errorMessage);
                ExceptionCodes exceptionCode = ExceptionCodes.EXTERNAL_STORE_ID_NOT_FOUND;
                throw new APIManagementException(errorMessage, new ErrorItem(exceptionCode.getErrorMessage(),
                        errorMessage, exceptionCode.getErrorCode(), exceptionCode.getHttpStatusCode()));
            }
            inputStores.add(inputStore);
        }
    }
    // An older version (by version-string comparison) decides whether a versioned publish is needed.
    Set<String> versions = getAPIVersions(apiIdentifier.getProviderName(), apiIdentifier.getName(),
            api.getOrganization());
    APIVersionStringComparator comparator = new APIVersionStringComparator();
    for (String tempVersion : versions) {
        if (comparator.compare(tempVersion, apiIdentifier.getVersion()) < 0) {
            apiOlderVersionExist = true;
            break;
        }
    }
    return updateAPIsInExternalAPIStores(api, inputStores, apiOlderVersionExist);
}

/**
 * When enabled publishing to external APIStores support,publish the API to external APIStores
 *
 * @param api         The API which need to published
 * @param apiStoreSet The APIStores set to which need to publish API
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status
 */
@Override
public void publishToExternalAPIStores(API api, Set<APIStore> apiStoreSet, boolean apiOlderVersionExist)
        throws APIManagementException {
    Set<APIStore> publishedStores = new HashSet<APIStore>();
    StringBuilder errorStatus = new StringBuilder("Failure to publish to External Stores : ");
    boolean failure = false;
    for (APIStore store : apiStoreSet) {
        org.wso2.carbon.apimgt.api.model.APIPublisher publisher = store.getPublisher();
        try {
            // First trying to publish the API to external APIStore
            boolean published;
            String version = ApiMgtDAO.getInstance().getLastPublishedAPIVersionFromAPIStore(api.getId(),
                    store.getName());
            if (apiOlderVersionExist && version != null && !(publisher instanceof WSO2APIPublisher)) {
                published = publisher.createVersionedAPIToStore(api, store, version);
                publisher.updateToStore(api, store);
            } else {
                published = publisher.publishToStore(api, store);
            }
            if (published) { // If
// published,then save to database.
                publishedStores.add(store);
            }
        } catch (APIManagementException e) {
            // Keep trying the remaining stores; aggregated failures are reported below.
            failure = true;
            log.error(e);
            errorStatus.append(store.getDisplayName()).append(',');
        }
    }
    if (!publishedStores.isEmpty()) {
        addExternalAPIStoresDetails(api.getUuid(), publishedStores);
    }
    if (failure) {
        throw new APIManagementException(errorStatus.substring(0, errorStatus.length() - 2));
    }
}

/**
 * Update the API to external APIStores and database
 *
 * @param api         The API which need to published
 * @param apiStoreSet The APIStores set to which need to publish API
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status
 */
@Override
public boolean updateAPIsInExternalAPIStores(API api, Set<APIStore> apiStoreSet, boolean apiOlderVersionExist)
        throws APIManagementException {
    Set<APIStore> publishedStores = getPublishedExternalAPIStores(api.getUuid());
    Set<APIStore> notPublishedAPIStores = new HashSet<APIStore>();
    Set<APIStore> updateApiStores = new HashSet<APIStore>();
    Set<APIStore> removedApiStores = new HashSet<APIStore>();
    StringBuilder errorStatus = new StringBuilder("Failed to update External Stores : ");
    boolean failure = false;
    // Stores previously published but no longer selected must be removed.
    if (publishedStores != null) {
        removedApiStores.addAll(publishedStores);
        removedApiStores.removeAll(apiStoreSet);
    }
    for (APIStore apiStore : apiStoreSet) {
        boolean publishedToStore = false;
        if (publishedStores != null) {
            for (APIStore store : publishedStores) {
                //If selected external store in edit page is already saved in db
                if (store.equals(apiStore)) {
                    //Check if there's a modification happened in config file external store definition
                    try {
                        if (!isAPIAvailableInExternalAPIStore(api, apiStore)) {
                            // API is not available
                            continue;
                        }
                    } catch (APIManagementException e) {
                        failure = true;
                        log.error(e);
                        errorStatus.append(store.getDisplayName()).append(',');
                    }
                    publishedToStore = true; //Already the API has published to external APIStore
                    //In this case,the API is already added to external APIStore,thus we don't need to publish it again.
                    //We need to update the API in external Store.
                    //Include to update API in external APIStore
                    updateApiStores.add(APIUtil.getExternalAPIStore(store.getName(), tenantId));
                }
            }
        }
        if (!publishedToStore) {
            //If the API has not yet published to selected external APIStore
            notPublishedAPIStores.add(APIUtil.getExternalAPIStore(apiStore.getName(), tenantId));
        }
    }
    //Publish API to external APIStore which are not yet published
    try {
        publishToExternalAPIStores(api, notPublishedAPIStores, apiOlderVersionExist);
    } catch (APIManagementException e) {
        handleException("Failed to publish API to external Store. ", e);
    }
    //Update the APIs which are already exist in the external APIStore
    updateAPIInExternalAPIStores(api, updateApiStores);
    //Update database saved published APIStore details
    updateExternalAPIStoresDetails(api.getUuid(), updateApiStores);
    deleteFromExternalAPIStores(api, removedApiStores);
    if (failure) {
        throw new APIManagementException(errorStatus.substring(0, errorStatus.length() - 2));
    }
    return true;
}

// Removes the API from stores it is no longer published to, then clears the DB mappings
// for the stores where removal succeeded.
private void deleteFromExternalAPIStores(API api, Set<APIStore> removedApiStores) throws APIManagementException {
    Set<APIStore> removalCompletedStores = new HashSet<APIStore>();
    StringBuilder errorStatus = new StringBuilder("Failed to delete from External Stores : ");
    boolean failure = false;
    for (APIStore store : removedApiStores) {
        org.wso2.carbon.apimgt.api.model.APIPublisher publisher =
                APIUtil.getExternalAPIStore(store.getName(), tenantId).getPublisher();
        try {
            boolean deleted = publisher.deleteFromStore(
                    api.getId(), APIUtil.getExternalAPIStore(store.getName(), tenantId));
            if (deleted) {
                // If the attempt is successful, database will be
                // changed deleting the External store mappings.
removalCompletedStores.add(store);
            }
        } catch (APIManagementException e) {
            // Continue with the remaining stores; aggregated failures are reported below.
            failure = true;
            log.error(e);
            errorStatus.append(store.getDisplayName()).append(',');
        }
    }
    if (!removalCompletedStores.isEmpty()) {
        removeExternalAPIStoreDetails(api.getUuid(), removalCompletedStores);
    }
    if (failure) {
        throw new APIManagementException(errorStatus.substring(0, errorStatus.length() - 2));
    }
}

// Removes the DB rows mapping the given API to the stores it was successfully removed from.
private void removeExternalAPIStoreDetails(String id, Set<APIStore> removalCompletedStores)
        throws APIManagementException {
    apiMgtDAO.deleteExternalAPIStoresDetails(id, removalCompletedStores);
}

// Asks the store's publisher whether the API is currently available in that external store.
private boolean isAPIAvailableInExternalAPIStore(API api, APIStore store) throws APIManagementException {
    org.wso2.carbon.apimgt.api.model.APIPublisher publisher = store.getPublisher();
    return publisher.isAPIAvailable(api, store);
}

/**
 * When enabled publishing to external APIStores support,updating the API existing in external APIStores
 *
 * @param api         The API which need to published
 * @param apiStoreSet The APIStores set to which need to publish API
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status
 */
private void updateAPIInExternalAPIStores(API api, Set<APIStore> apiStoreSet) throws APIManagementException {
    if (apiStoreSet != null && !apiStoreSet.isEmpty()) {
        StringBuilder errorStatus = new StringBuilder("Failed to update External Stores : ");
        boolean failure = false;
        for (APIStore store : apiStoreSet) {
            try {
                org.wso2.carbon.apimgt.api.model.APIPublisher publisher = store.getPublisher();
                publisher.updateToStore(api, store);
            } catch (APIManagementException e) {
                failure = true;
                log.error(e);
                errorStatus.append(store.getDisplayName()).append(',');
            }
        }
        if (failure) {
            throw new APIManagementException(errorStatus.substring(0, errorStatus.length() - 2));
        }
    }
}

/**
 * When enabled publishing to external APIStores support,update external apistores data in db
 *
 * @param apiId       The API Identifier which need to update in db
 * @param apiStoreSet The APIStores
* set which need to update in db
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status
 */
private void updateExternalAPIStoresDetails(String apiId, Set<APIStore> apiStoreSet) throws APIManagementException {
    apiMgtDAO.updateExternalAPIStoresDetails(apiId, apiStoreSet);
}

// Persists the API-to-store publish mappings; returns the DAO result.
private boolean addExternalAPIStoresDetails(String apiId, Set<APIStore> apiStoreSet) throws APIManagementException {
    return apiMgtDAO.addExternalAPIStoresDetails(apiId, apiStoreSet);
}

/**
 * When enabled publishing to external APIStores support,get all the external apistore details which are
 * published and stored in db and which are not unpublished
 *
 * @param apiId The API uuid which need to update in db
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status
 */
@Override
public Set<APIStore> getExternalAPIStores(String apiId) throws APIManagementException {
    if (APIUtil.isAPIsPublishToExternalAPIStores(tenantId)) {
        SortedSet<APIStore> sortedApiStores = new TreeSet<APIStore>(new APIStoreNameComparator());
        Set<APIStore> publishedStores = apiMgtDAO.getExternalAPIStoresDetails(apiId);
        sortedApiStores.addAll(publishedStores);
        return APIUtil.getExternalAPIStores(sortedApiStores, tenantId);
    } else {
        // NOTE(review): returns null (not an empty set) when external store publishing is disabled —
        // callers must null-check.
        return null;
    }
}

/**
 * When enabled publishing to external APIStores support,get only the published external apistore details which are
 * stored in db
 *
 * @param apiId The API uuid which need to update in db
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status
 */
@Override
public Set<APIStore> getPublishedExternalAPIStores(String apiId) throws APIManagementException {
    Set<APIStore> storesSet;
    SortedSet<APIStore> configuredAPIStores = new TreeSet<>(new APIStoreNameComparator());
    configuredAPIStores.addAll(APIUtil.getExternalStores(tenantId));
    if (APIUtil.isAPIsPublishToExternalAPIStores(tenantId)) {
        storesSet = apiMgtDAO.getExternalAPIStoresDetails(apiId);
        //Retains only the
// stores that contained in configuration
        storesSet.retainAll(configuredAPIStores);
        return storesSet;
    }
    return null;
}

/**
 * Get stored custom inSequences from governanceSystem registry
 *
 * @throws APIManagementException
 */
public List<String> getCustomInSequences(APIIdentifier apiIdentifier) throws APIManagementException {

    List<String> sequenceList = new ArrayList<String>();
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = null;
        // Provider names store '@' as '-AT-'; decode it to derive the tenant domain.
        if (apiIdentifier.getProviderName().contains("-AT-")) {
            String provider = apiIdentifier.getProviderName().replace("-AT-", "@");
            tenantDomain = MultitenantUtils.getTenantDomain(provider);
        }
        PrivilegedCarbonContext.startTenantFlow();
        isTenantFlowStarted = true;
        if (!StringUtils.isEmpty(tenantDomain)) {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        } else {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain
                    (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
        }
        UserRegistry registry =
                ServiceReferenceHolder.getInstance().getRegistryService().getGovernanceSystemRegistry(tenantId);
        // Global custom in-sequences.
        if (registry.resourceExists(APIConstants.API_CUSTOM_INSEQUENCE_LOCATION)) {
            org.wso2.carbon.registry.api.Collection inSeqCollection = (org.wso2.carbon.registry.api.Collection)
                    registry.get(APIConstants.API_CUSTOM_INSEQUENCE_LOCATION);
            if (inSeqCollection != null) {
                String[] inSeqChildPaths = inSeqCollection.getChildren();
                Arrays.sort(inSeqChildPaths);
                for (String inSeqChildPath : inSeqChildPaths) {
                    Resource inSequence = registry.get(inSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(inSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        // Skip malformed sequence files instead of failing the whole listing.
                        log.info("Error occurred when reading the sequence '" + inSeqChildPath
                                + "' from the registry.", e);
                    }
                }
            }
        }
        // API-specific custom in-sequences.
        String customInSeqFileLocation = APIUtil.getSequencePath(apiIdentifier, "in");
        if (registry.resourceExists(customInSeqFileLocation)) {
            org.wso2.carbon.registry.api.Collection inSeqCollection = (org.wso2.carbon.registry.api.Collection)
                    registry.get(customInSeqFileLocation);
            if (inSeqCollection != null) {
                String[] inSeqChildPaths = inSeqCollection.getChildren();
                Arrays.sort(inSeqChildPaths);
                for (String inSeqChildPath : inSeqChildPaths) {
                    Resource inSequence = registry.get(inSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(inSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + inSeqChildPath
                                + "' from the registry.", e);
                    }
                }
            }
        }
    } catch (Exception e) {
        handleException("Issue is in getting custom InSequences from the Registry", e);
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    return sequenceList;
}

/**
 * Get stored custom outSequences from governanceSystem registry
 *
 * @throws APIManagementException
 */
public List<String> getCustomOutSequences(APIIdentifier apiIdentifier) throws APIManagementException {

    List<String> sequenceList = new ArrayList<String>();
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = null;
        if (apiIdentifier.getProviderName().contains("-AT-")) {
            String provider = apiIdentifier.getProviderName().replace("-AT-", "@");
            tenantDomain = MultitenantUtils.getTenantDomain(provider);
        }
        PrivilegedCarbonContext.startTenantFlow();
        isTenantFlowStarted = true;
        if (!StringUtils.isEmpty(tenantDomain)) {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        } else {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain
                    (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
        }
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        // Global custom out-sequences.
        if (registry.resourceExists(APIConstants.API_CUSTOM_OUTSEQUENCE_LOCATION)) {
            org.wso2.carbon.registry.api.Collection outSeqCollection = (org.wso2.carbon.registry.api.Collection)
                    registry.get(APIConstants.API_CUSTOM_OUTSEQUENCE_LOCATION);
            if (outSeqCollection != null) {
                String[] outSeqChildPaths = outSeqCollection.getChildren();
                Arrays.sort(outSeqChildPaths);
                for (String childPath : outSeqChildPaths) {
                    Resource outSequence = registry.get(childPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + childPath
                                + "' from the registry.", e);
                    }
                }
            }
        }
        // API-specific custom out-sequences.
        String customOutSeqFileLocation = APIUtil.getSequencePath(apiIdentifier, "out");
        if (registry.resourceExists(customOutSeqFileLocation)) {
            org.wso2.carbon.registry.api.Collection outSeqCollection = (org.wso2.carbon.registry.api.Collection)
                    registry.get(customOutSeqFileLocation);
            if (outSeqCollection != null) {
                String[] outSeqChildPaths = outSeqCollection.getChildren();
                Arrays.sort(outSeqChildPaths);
                for (String outSeqChildPath : outSeqChildPaths) {
                    Resource outSequence = registry.get(outSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + outSeqChildPath
                                + "' from the registry.", e);
                    }
                }
            }
        }
    } catch (Exception e) {
        handleException("Issue is in getting custom OutSequences from the Registry", e);
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    return sequenceList;
}

/**
 * Get the list of Custom InSequences including API defined in sequences.
 *
 * @return List of available sequences
 * @throws APIManagementException
 */
public List<String> getCustomInSequences() throws APIManagementException {

    // TreeSet de-duplicates and sorts sequence names.
    Set<String> sequenceList = new TreeSet<>();
    try {
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        if (registry.resourceExists(APIConstants.API_CUSTOM_INSEQUENCE_LOCATION)) {
            org.wso2.carbon.registry.api.Collection inSeqCollection = (org.wso2.carbon.registry.api.Collection)
                    registry.get(APIConstants.API_CUSTOM_INSEQUENCE_LOCATION);
            if (inSeqCollection != null) {
                String[] inSeqChildPaths = inSeqCollection.getChildren();
                Arrays.sort(inSeqChildPaths);
                for (String inSeqChildPath : inSeqChildPaths) {
                    Resource inSequence = registry.get(inSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(inSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        // Skip malformed sequence files instead of failing the whole listing.
                        log.info("Error occurred when reading the sequence '" + inSeqChildPath
                                + "' from the registry.", e);
                    }
                }
            }
        }
    } catch (RegistryException e) {
        String msg = "Error while retrieving registry for tenant " + tenantId;
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (org.wso2.carbon.registry.api.RegistryException e) {
        String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN + " in the registry";
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (Exception e) {
        log.error(e.getMessage());
        throw new APIManagementException(e.getMessage(), e);
    }
    return new ArrayList<>(sequenceList);
}

/**
 * Get the list of Custom OutSequences including API defined in sequences.
 *
 * @return List of available sequences
 * @throws APIManagementException
 */
public List<String> getCustomOutSequences() throws APIManagementException {

    // TreeSet de-duplicates and sorts sequence names.
    Set<String> sequenceList = new TreeSet<>();
    try {
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        if (registry.resourceExists(APIConstants.API_CUSTOM_OUTSEQUENCE_LOCATION)) {
            org.wso2.carbon.registry.api.Collection outSeqCollection = (org.wso2.carbon.registry.api.Collection)
                    registry.get(APIConstants.API_CUSTOM_OUTSEQUENCE_LOCATION);
            if (outSeqCollection != null) {
                String[] outSeqChildPaths = outSeqCollection.getChildren();
                Arrays.sort(outSeqChildPaths);
                for (String outSeqChildPath : outSeqChildPaths) {
                    Resource outSequence = registry.get(outSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        // Skip malformed sequence files instead of failing the whole listing.
                        log.info("Error occurred when reading the sequence '" + outSeqChildPath
                                + "' from the registry.", e);
                    }
                }
            }
        }
    } catch (RegistryException e) {
        String msg = "Error while retrieving registry for tenant " + tenantId;
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (org.wso2.carbon.registry.api.RegistryException e) {
        String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT + " in the registry";
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (Exception e) {
        log.error(e.getMessage());
        throw new APIManagementException(e.getMessage(), e);
    }
    return new ArrayList<>(sequenceList);
}

/**
 * Get stored custom fault sequences from governanceSystem registry
 *
 * @throws APIManagementException
 */
@Deprecated
public List<String> getCustomFaultSequences() throws APIManagementException {

    Set<String> sequenceList = new TreeSet<>();
    try {
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        if
(registry.resourceExists(APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION)) {
            org.wso2.carbon.registry.api.Collection faultSeqCollection = (org.wso2.carbon.registry.api.Collection)
                    registry.get(APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION);
            if (faultSeqCollection != null) {
                String[] faultSeqChildPaths = faultSeqCollection.getChildren();
                Arrays.sort(faultSeqChildPaths);
                for (String faultSeqChildPath : faultSeqChildPaths) {
                    Resource outSequence = registry.get(faultSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        // Skip malformed sequence files instead of failing the whole listing.
                        log.info("Error occurred when reading the sequence '" + faultSeqChildPath
                                + "' from the registry.", e);
                    }
                }
            }
        }
    } catch (RegistryException e) {
        String msg = "Error while retrieving registry for tenant " + tenantId;
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (org.wso2.carbon.registry.api.RegistryException e) {
        String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT + " in the registry";
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (Exception e) {
        log.error(e.getMessage());
        throw new APIManagementException(e.getMessage(), e);
    }
    return new ArrayList<>(sequenceList);
}

/**
 * Get stored custom fault sequences from governanceSystem registry
 *
 * @throws APIManagementException
 */
public List<String> getCustomFaultSequences(APIIdentifier apiIdentifier) throws APIManagementException {

    List<String> sequenceList = new ArrayList<String>();
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = null;
        // Provider names store '@' as '-AT-'; decode it to derive the tenant domain.
        if (apiIdentifier.getProviderName().contains("-AT-")) {
            String provider = apiIdentifier.getProviderName().replace("-AT-", "@");
            tenantDomain = MultitenantUtils.getTenantDomain(provider);
        }
        PrivilegedCarbonContext.startTenantFlow();
        isTenantFlowStarted = true;
        if (!StringUtils.isEmpty(tenantDomain)) {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        } else {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain
                    (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
        }
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        // Global custom fault sequences.
        if (registry.resourceExists(APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION)) {
            org.wso2.carbon.registry.api.Collection faultSeqCollection =
                    (org.wso2.carbon.registry.api.Collection) registry.get(
                            APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION);
            if (faultSeqCollection != null) {
                String[] faultSeqChildPaths = faultSeqCollection.getChildren();
                Arrays.sort(faultSeqChildPaths);
                for (String faultSeqChildPath : faultSeqChildPaths) {
                    Resource outSequence = registry.get(faultSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + faultSeqChildPath
                                + "' from the registry.", e);
                    }
                }
            }
        }
        // API-specific custom fault sequences.
        String customOutSeqFileLocation = APIUtil.getSequencePath(apiIdentifier,
                APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT);
        if (registry.resourceExists(customOutSeqFileLocation)) {
            org.wso2.carbon.registry.api.Collection faultSeqCollection =
                    (org.wso2.carbon.registry.api.Collection) registry.get(customOutSeqFileLocation);
            if (faultSeqCollection != null) {
                String[] faultSeqChildPaths = faultSeqCollection.getChildren();
                Arrays.sort(faultSeqChildPaths);
                for (String faultSeqChildPath : faultSeqChildPaths) {
                    Resource faultSequence = registry.get(faultSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(faultSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + faultSeqChildPath
                                + "' from the registry.", e);
                    }
                }
            }
        }
    } catch (RegistryException e) {
        String msg = "Error while retrieving registry for tenant " + tenantId;
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (org.wso2.carbon.registry.api.RegistryException e) {
        String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT + " sequences of "
                + apiIdentifier + " in the registry";
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (Exception e) {
        log.error(e.getMessage());
        throw new APIManagementException(e.getMessage(), e);
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    return sequenceList;
}

/**
 * Get the list of Custom in sequences of API.
 *
 * @return List of in sequences
 * @throws APIManagementException
 */
public List<String> getCustomApiInSequences(APIIdentifier apiIdentifier) throws APIManagementException {

    Set<String> sequenceList = new TreeSet<>();
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = null;
        if (apiIdentifier.getProviderName().contains("-AT-")) {
            String provider = apiIdentifier.getProviderName().replace("-AT-", "@");
            tenantDomain = MultitenantUtils.getTenantDomain(provider);
        }
        PrivilegedCarbonContext.startTenantFlow();
        isTenantFlowStarted = true;
        if (!StringUtils.isEmpty(tenantDomain)) {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        } else {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain
                    (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
        }
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        String customInSeqFileLocation = APIUtil
                .getSequencePath(apiIdentifier, APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN);
        if (registry.resourceExists(customInSeqFileLocation)) {
            org.wso2.carbon.registry.api.Collection inSeqCollection = (org.wso2.carbon.registry.api.Collection) registry
                    .get(customInSeqFileLocation);
            if (inSeqCollection != null) {
                String[] inSeqChildPaths =
inSeqCollection.getChildren(); Arrays.sort(inSeqChildPaths); for (String inSeqChildPath : inSeqChildPaths) { Resource outSequence = registry.get(inSeqChildPath); try { OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream()); sequenceList.add(seqElment.getAttributeValue(new QName("name"))); } catch (OMException e) { log.info("Error occurred when reading the sequence '" + inSeqChildPath + "' from the registry.", e); } } } } } catch (RegistryException e) { String msg = "Error while retrieving registry for tenant " + tenantId; log.error(msg); throw new APIManagementException(msg, e); } catch (org.wso2.carbon.registry.api.RegistryException e) { String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN + " sequences of " + apiIdentifier + " in the registry"; log.error(msg); throw new APIManagementException(msg, e); } catch (Exception e) { log.error(e.getMessage()); throw new APIManagementException(e.getMessage(), e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } return new ArrayList<>(sequenceList); } /** * Get the list of Custom out Sequences of API * * @return List of available out sequences * @throws APIManagementException */ public List<String> getCustomApiOutSequences(APIIdentifier apiIdentifier) throws APIManagementException { Set<String> sequenceList = new TreeSet<>(); boolean isTenantFlowStarted = false; try { String tenantDomain = null; if (apiIdentifier.getProviderName().contains("-AT-")) { String provider = apiIdentifier.getProviderName().replace("-AT-", "@"); tenantDomain = MultitenantUtils.getTenantDomain(provider); } PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; if (!StringUtils.isEmpty(tenantDomain)) { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } UserRegistry registry 
= ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); String customOutSeqFileLocation = APIUtil.getSequencePath(apiIdentifier, APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT); if (registry.resourceExists(customOutSeqFileLocation)) { org.wso2.carbon.registry.api.Collection outSeqCollection = (org.wso2.carbon.registry.api.Collection) registry.get(customOutSeqFileLocation); if (outSeqCollection != null) { String[] outSeqChildPaths = outSeqCollection.getChildren(); Arrays.sort(outSeqChildPaths); for (String outSeqChildPath : outSeqChildPaths) { Resource outSequence = registry.get(outSeqChildPath); try { OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream()); sequenceList.add(seqElment.getAttributeValue(new QName("name"))); } catch (OMException e) { log.info("Error occurred when reading the sequence '" + outSeqChildPath + "' from the registry.", e); } } } } } catch (RegistryException e) { String msg = "Error while retrieving registry for tenant " + tenantId; log.error(msg); throw new APIManagementException(msg, e); } catch (org.wso2.carbon.registry.api.RegistryException e) { String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT + " sequences of " + apiIdentifier + " in the registry"; log.error(msg); throw new APIManagementException(msg, e); } catch (Exception e) { log.error(e.getMessage()); throw new APIManagementException(e.getMessage(), e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } return new ArrayList<>(sequenceList); } /** * Get the list of Custom Fault Sequences of API. 
 *
 * @return List of available fault sequences
 * @throws APIManagementException
 */
public List<String> getCustomApiFaultSequences(APIIdentifier apiIdentifier) throws APIManagementException {
    Set<String> sequenceList = new TreeSet<>();
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = null;
        // Provider names store '@' as "-AT-"; restore it before resolving the tenant domain.
        if (apiIdentifier.getProviderName().contains("-AT-")) {
            String provider = apiIdentifier.getProviderName().replace("-AT-", "@");
            tenantDomain = MultitenantUtils.getTenantDomain(provider);
        }
        PrivilegedCarbonContext.startTenantFlow();
        isTenantFlowStarted = true;
        if (!StringUtils.isEmpty(tenantDomain)) {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        } else {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain
                    (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
        }
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        // NOTE(review): variable is named "Out" but the path requested is the FAULT sequence path.
        String customOutSeqFileLocation = APIUtil.getSequencePath(apiIdentifier,
                APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT);
        if (registry.resourceExists(customOutSeqFileLocation)) {
            org.wso2.carbon.registry.api.Collection faultSeqCollection =
                    (org.wso2.carbon.registry.api.Collection) registry
                            .get(customOutSeqFileLocation);
            if (faultSeqCollection != null) {
                String[] faultSeqChildPaths = faultSeqCollection.getChildren();
                Arrays.sort(faultSeqChildPaths);
                for (String faultSeqChildPath : faultSeqChildPaths) {
                    Resource faultSequence = registry.get(faultSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(faultSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        // A single malformed sequence is logged and skipped rather than failing the whole listing.
                        log.info("Error occurred when reading the sequence '" + faultSeqChildPath
                                + "' from the registry.", e);
                    }
                }
            }
        }
    } catch (RegistryException e) {
        String msg = "Error while retrieving registry for tenant " + tenantId;
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (org.wso2.carbon.registry.api.RegistryException e) {
        String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT
                + " sequences of " + apiIdentifier + " in the registry";
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (Exception e) {
        log.error(e.getMessage());
        throw new APIManagementException(e.getMessage(), e);
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    return new ArrayList<>(sequenceList);
}

/**
 * Returns true when the configured API gateway type is Synapse.
 */
@Override
public boolean isSynapseGateway() throws APIManagementException {
    APIManagerConfiguration config = getAPIManagerConfiguration();
    String gatewayType = config.getFirstProperty(APIConstants.API_GATEWAY_TYPE);
    return APIConstants.API_GATEWAY_TYPE_SYNAPSE.equalsIgnoreCase(gatewayType);
}

/**
 * Validates the x-throttling tiers attached to the API's own URI templates against the
 * tiers available in the given tenant.
 */
@Override
public void validateResourceThrottlingTiers(API api, String tenantDomain) throws APIManagementException {
    if (log.isDebugEnabled()) {
        log.debug("Validating x-throttling tiers defined in swagger api definition resource");
    }
    Set<URITemplate> uriTemplates = api.getUriTemplates();
    checkResourceThrottlingTiersInURITemplates(uriTemplates, tenantDomain);
}

/**
 * Validates the x-throttling tiers declared inside a raw swagger definition against the
 * tiers available in the given tenant.
 */
@Override
public void validateResourceThrottlingTiers(String swaggerContent, String tenantDomain)
        throws APIManagementException {
    if (log.isDebugEnabled()) {
        log.debug("Validating x-throttling tiers defined in swagger api definition resource");
    }
    APIDefinition apiDefinition = OASParserUtil.getOASParser(swaggerContent);
    Set<URITemplate> uriTemplates = apiDefinition.getURITemplates(swaggerContent);
    checkResourceThrottlingTiersInURITemplates(uriTemplates, tenantDomain);
}

/**
 * Rejects an API whose API-level throttling policy is not among the tenant's known tiers.
 */
@Override
public void validateAPIThrottlingTier(API api, String tenantDomain) throws APIManagementException {
    if (log.isDebugEnabled()) {
        log.debug("Validating apiLevelPolicy defined in the API");
    }
    Map<String, Tier> tierMap = APIUtil.getTiers(APIConstants.TIER_RESOURCE_TYPE, tenantDomain);
    if (tierMap != null) {
        String apiLevelPolicy = api.getApiLevelPolicy();
        if (apiLevelPolicy != null && !tierMap.containsKey(apiLevelPolicy)) {
            String message = "Invalid API level throttling tier " + apiLevelPolicy + " found in api definition";
            throw new APIManagementException(message);
        }
    }
}

/**
 * Rejects an API Product whose product-level throttling policy is not among the tenant's known tiers.
 */
@Override
public void validateProductThrottlingTier(APIProduct apiProduct, String tenantDomain)
        throws APIManagementException {
    if (log.isDebugEnabled()) {
        log.debug("Validating productLevelPolicy defined in the API Product");
    }
    Map<String, Tier> tierMap = APIUtil.getTiers(APIConstants.TIER_RESOURCE_TYPE, tenantDomain);
    if (tierMap != null) {
        String apiLevelPolicy = apiProduct.getProductLevelPolicy();
        if (apiLevelPolicy != null && !tierMap.containsKey(apiLevelPolicy)) {
            String message = "Invalid Product level throttling tier " + apiLevelPolicy
                    + " found in api definition";
            throw new APIManagementException(message);
        }
    }
}

// Shared helper: throws if any URI template references an x-throttling tier unknown to the tenant.
private void checkResourceThrottlingTiersInURITemplates(Set<URITemplate> uriTemplates, String tenantDomain)
        throws APIManagementException {
    Map<String, Tier> tierMap = APIUtil.getTiers(APIConstants.TIER_RESOURCE_TYPE, tenantDomain);
    if (tierMap != null) {
        for (URITemplate template : uriTemplates) {
            if (template.getThrottlingTier() != null && !tierMap.containsKey(template.getThrottlingTier())) {
                String message = "Invalid x-throttling tier " + template.getThrottlingTier()
                        + " found in api definition for resource " + template.getHTTPVerb() + " "
                        + template.getUriTemplate();
                log.error(message);
                throw new APIManagementException(message);
            }
        }
    }
}

/**
 * Persists a swagger definition, resolving the API's UUID from the identifier when it is not
 * already carried on the identifier itself.
 */
@Override
public void saveSwagger20Definition(APIIdentifier apiId, String jsonText, String organization)
        throws APIManagementException {
    String uuid;
    if (apiId.getUUID() != null) {
        uuid = apiId.getUUID();
    } else {
        uuid = apiMgtDAO.getUUIDFromIdentifier(apiId.getProviderName(), apiId.getApiName(), apiId.getVersion(),
                organization);
    }
    saveSwaggerDefinition(uuid, jsonText, organization);
}

/**
 * Persists a swagger definition for an API object; the UUID is taken from the API, its
 * identifier, or looked up in the DAO as a last resort.
 */
@Override
public void saveSwaggerDefinition(API api, String jsonText, String organization) throws APIManagementException {
    String apiId;
    if (api.getUuid() != null) {
        apiId = api.getUuid();
    } else if (api.getId().getUUID() != null) {
        apiId = api.getId().getUUID();
    } else {
        apiId = apiMgtDAO.getUUIDFromIdentifier(api.getId().getProviderName(), api.getId().getApiName(),
                api.getId().getVersion(), organization);
    }
    saveSwaggerDefinition(apiId, jsonText, organization);
}

/**
 * Persists the OAS definition via the persistence layer for the given API UUID.
 */
@Override
public void saveSwaggerDefinition(String apiId, String jsonText, String organization)
        throws APIManagementException {
    try {
        apiPersistenceInstance.saveOASDefinition(new Organization(organization), apiId, jsonText);
    } catch (OASPersistenceException e) {
        throw new APIManagementException("Error while persisting OAS definition ", e);
    }
}

/**
 * Stores the GraphQL schema of the API in the registry inside the current tenant's flow.
 */
@Override
public void saveGraphqlSchemaDefinition(API api, String schemaDefinition) throws APIManagementException {
    try {
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        GraphQLSchemaDefinition schemaDef = new GraphQLSchemaDefinition();
        schemaDef.saveGraphQLSchemaDefinition(api, schemaDefinition, registry);
    } finally {
        PrivilegedCarbonContext.endTenantFlow();
    }
}

/**
 * Persists a swagger definition for an API Product identified by its identifier.
 */
@Override
public void saveSwagger20Definition(APIProductIdentifier apiId, String jsonText) throws APIManagementException {
    try {
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        saveAPIDefinition(getAPIProduct(apiId), jsonText, registry);
    } finally {
        PrivilegedCarbonContext.endTenantFlow();
    }
}

/**
 * Persists a swagger definition for an API Product object.
 */
@Override
public void saveSwaggerDefinition(APIProduct apiProduct, String jsonText) throws APIManagementException {
    try {
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        saveAPIDefinition(apiProduct, jsonText, registry);
    } finally {
        PrivilegedCarbonContext.endTenantFlow();
    }
}

// Writes the product's OpenAPI definition as a registry resource and re-applies resource permissions.
private void saveAPIDefinition(APIProduct apiProduct, String apiDefinitionJSON,
                               org.wso2.carbon.registry.api.Registry
                               registry) throws APIManagementException {
    String apiName = apiProduct.getId().getName();
    String apiVersion = apiProduct.getId().getVersion();
    String apiProviderName = apiProduct.getId().getProviderName();
    try {
        String resourcePath = APIUtil.getAPIProductOpenAPIDefinitionFilePath(apiName, apiVersion,
                apiProviderName);
        resourcePath = resourcePath + APIConstants.API_OAS_DEFINITION_RESOURCE_NAME;
        org.wso2.carbon.registry.api.Resource resource;
        // Reuse the existing resource if present so its metadata is preserved; otherwise create one.
        if (!registry.resourceExists(resourcePath)) {
            resource = registry.newResource();
        } else {
            resource = registry.get(resourcePath);
        }
        resource.setContent(apiDefinitionJSON);
        resource.setMediaType("application/json");
        registry.put(resourcePath, resource);
        String[] visibleRoles = null;
        if (apiProduct.getVisibleRoles() != null) {
            visibleRoles = apiProduct.getVisibleRoles().split(",");
        }
        //Need to set anonymous if the visibility is public
        APIUtil.clearResourcePermissions(resourcePath, apiProduct.getId(),
                ((UserRegistry) registry).getTenantId());
        APIUtil.setResourcePermissions(apiProviderName, apiProduct.getVisibility(), visibleRoles, resourcePath);
    } catch (org.wso2.carbon.registry.api.RegistryException e) {
        handleException("Error while adding Swagger Definition for " + apiName + '-' + apiVersion, e);
    }
}

/**
 * Generates and persists the merged swagger definition of an API Product from its constituent
 * API resources, then caches it on the product object.
 */
@Override
public void addAPIProductSwagger(String productId, Map<API, List<APIProductResource>> apiToProductResourceMapping,
                                 APIProduct apiProduct, String orgId) throws APIManagementException {
    APIDefinition parser = new OAS3Parser();
    SwaggerData swaggerData = new SwaggerData(apiProduct);
    String apiProductSwagger = parser.generateAPIDefinition(swaggerData);
    apiProductSwagger = OASParserUtil.updateAPIProductSwaggerOperations(apiToProductResourceMapping,
            apiProductSwagger);
    saveSwaggerDefinition(productId, apiProductSwagger, orgId);
    apiProduct.setDefinition(apiProductSwagger);
}

/**
 * Regenerates the product swagger against the existing definition and persists the update.
 */
@Override
public void updateAPIProductSwagger(String productId, Map<API, List<APIProductResource>> apiToProductResourceMapping,
                                    APIProduct apiProduct, String orgId) throws APIManagementException {
    APIDefinition parser = new OAS3Parser();
    SwaggerData updatedData = new SwaggerData(apiProduct);
    String existingProductSwagger = getAPIDefinitionOfAPIProduct(apiProduct);
    String updatedProductSwagger = parser.generateAPIDefinition(updatedData, existingProductSwagger);
    updatedProductSwagger = OASParserUtil.updateAPIProductSwaggerOperations(apiToProductResourceMapping,
            updatedProductSwagger);
    saveSwaggerDefinition(productId, updatedProductSwagger, orgId);
    apiProduct.setDefinition(updatedProductSwagger);
}

/**
 * Registry-artifact based lifecycle state change: optionally routes the change through the
 * API-state workflow executor and, once approved (or when no workflow state is stored),
 * invokes the lifecycle action on the governance artifact, records the event and notifies.
 *
 * @param apiIdentifier identifier of the API whose lifecycle is changed
 * @param action        lifecycle action to invoke (e.g. Publish, Deprecate)
 * @param organization  organization used to resolve the API UUID
 */
public APIStateChangeResponse changeLifeCycleStatus(APIIdentifier apiIdentifier, String action, String organization)
        throws APIManagementException, FaultGatewaysException {
    APIStateChangeResponse response = new APIStateChangeResponse();
    try {
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username);
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(this.tenantDomain, true);
        GenericArtifact apiArtifact = getAPIArtifact(apiIdentifier);
        String targetStatus;
        if (apiArtifact != null) {
            String providerName = apiArtifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER);
            String apiName = apiArtifact.getAttribute(APIConstants.API_OVERVIEW_NAME);
            String apiContext = apiArtifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT);
            String apiType = apiArtifact.getAttribute(APIConstants.API_OVERVIEW_TYPE);
            String apiVersion = apiArtifact.getAttribute(APIConstants.API_OVERVIEW_VERSION);
            String currentStatus = apiArtifact.getLifecycleState();
            String uuid = apiMgtDAO.getUUIDFromIdentifier(apiIdentifier, organization);
            int apiId = apiMgtDAO.getAPIID(uuid);
            WorkflowStatus apiWFState = null;
            WorkflowDTO wfDTO = apiMgtDAO.retrieveWorkflowFromInternalReference(Integer.toString(apiId),
                    WorkflowConstants.WF_TYPE_AM_API_STATE);
            if (wfDTO != null) {
                apiWFState = wfDTO.getStatus();
            }
            // if the workflow has started, then executor should not fire again
            if (!WorkflowStatus.CREATED.equals(apiWFState)) {
                try {
                    WorkflowProperties workflowProperties = getAPIManagerConfiguration().getWorkflowProperties();
                    WorkflowExecutor apiStateWFExecutor = WorkflowExecutorFactory.getInstance()
                            .getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_API_STATE);
                    APIStateWorkflowDTO apiStateWorkflow = new APIStateWorkflowDTO();
                    apiStateWorkflow.setApiCurrentState(currentStatus);
                    apiStateWorkflow.setApiLCAction(action);
                    apiStateWorkflow.setApiName(apiName);
                    apiStateWorkflow.setApiContext(apiContext);
                    apiStateWorkflow.setApiType(apiType);
                    apiStateWorkflow.setApiVersion(apiVersion);
                    apiStateWorkflow.setApiProvider(providerName);
                    apiStateWorkflow.setCallbackUrl(workflowProperties.getWorkflowCallbackAPI());
                    apiStateWorkflow.setExternalWorkflowReference(apiStateWFExecutor.generateUUID());
                    apiStateWorkflow.setTenantId(tenantId);
                    apiStateWorkflow.setTenantDomain(this.tenantDomain);
                    apiStateWorkflow.setWorkflowType(WorkflowConstants.WF_TYPE_AM_API_STATE);
                    apiStateWorkflow.setStatus(WorkflowStatus.CREATED);
                    apiStateWorkflow.setCreatedTime(System.currentTimeMillis());
                    apiStateWorkflow.setWorkflowReference(Integer.toString(apiId));
                    apiStateWorkflow.setInvoker(this.username);
                    apiStateWorkflow.setApiUUID(uuid);
                    String workflowDescription = "Pending lifecycle state change action: " + action;
                    apiStateWorkflow.setWorkflowDescription(workflowDescription);
                    WorkflowResponse workflowResponse = apiStateWFExecutor.execute(apiStateWorkflow);
                    response.setWorkflowResponse(workflowResponse);
                } catch (WorkflowException e) {
                    handleException("Failed to execute workflow for life cycle status change : " + e.getMessage(),
                            e);
                }
                // get the workflow state once the executor is executed.
                wfDTO = apiMgtDAO.retrieveWorkflowFromInternalReference(Integer.toString(apiId),
                        WorkflowConstants.WF_TYPE_AM_API_STATE);
                if (wfDTO != null) {
                    apiWFState = wfDTO.getStatus();
                    response.setStateChangeStatus(apiWFState.toString());
                } else {
                    response.setStateChangeStatus(WorkflowStatus.APPROVED.toString());
                }
            }
            // only change the lifecycle if approved
            // apiWFState is null when simple wf executor is used because wf state is not stored in the db.
            if (WorkflowStatus.APPROVED.equals(apiWFState) || apiWFState == null) {
                targetStatus = "";
                apiArtifact.invokeAction(action, APIConstants.API_LIFE_CYCLE);
                targetStatus = apiArtifact.getLifecycleState();
                if (!currentStatus.equals(targetStatus)) {
                    apiMgtDAO.recordAPILifeCycleEvent(apiId, currentStatus.toUpperCase(),
                            targetStatus.toUpperCase(), this.username, this.tenantId);
                }
                if (log.isDebugEnabled()) {
                    String logMessage = "API Status changed successfully. API Name: " + apiIdentifier.getApiName()
                            + ", API Version " + apiIdentifier.getVersion() + ", New Status : " + targetStatus;
                    log.debug(logMessage);
                }
                APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(),
                        APIConstants.EventType.API_LIFECYCLE_CHANGE.name(), tenantId, tenantDomain, apiName,
                        apiId, uuid, apiVersion, apiType, apiContext, providerName, targetStatus);
                APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name());
                return response;
            }
        }
    } catch (GovernanceException e) {
        // The executor embeds structured failure details in the cause message; parse them out.
        String cause = e.getCause().getMessage();
        if (!StringUtils.isEmpty(cause)) {
            if (cause.contains("FaultGatewaysException:")) {
                Map<String, Map<String, String>> faultMap = new HashMap<String, Map<String, String>>();
                String faultJsonString;
                if (!StringUtils.isEmpty(cause) && cause.split("FaultGatewaysException:").length > 1) {
                    faultJsonString = cause.split("FaultGatewaysException:")[1];
                    try {
                        JSONObject faultGatewayJson = (JSONObject) new JSONParser().parse(faultJsonString);
                        faultMap.putAll(faultGatewayJson);
                        throw new FaultGatewaysException(faultMap);
                    } catch
                    (ParseException e1) {
                        log.error("Couldn't parse the Failed Environment json", e);
                        handleException("Couldn't parse the Failed Environment json : " + e.getMessage(), e);
                    }
                }
            } else if (cause.contains("APIManagementException:")) {
                // This exception already logged from APIExecutor class hence this no need to logged again
                handleException(
                        "Failed to change the life cycle status : " + cause.split("APIManagementException:")[1], e);
            } else {
                /* This exception already logged from APIExecutor class hence this no need to logged again
                This block handles the all the exception which not have custom cause message*/
                handleException("Failed to change the life cycle status : " + e.getMessage(), e);
            }
        }
        return response;
    } finally {
        PrivilegedCarbonContext.endTenantFlow();
    }
    return response;
}

/**
 * Persistence-layer based lifecycle state change (UUID addressed). Routes the change through
 * the API-state workflow executor when configured; once approved (or when no workflow state is
 * stored), resolves the target state, updates the persistence layer, runs the state-change
 * side effects (gateway propagation, notifications, revision cleanup on RETIRED), records the
 * lifecycle event and publishes an API lifecycle-change notification.
 *
 * @param orgId     organization owning the API
 * @param uuid      UUID of the API
 * @param action    lifecycle action to perform
 * @param checklist lifecycle checklist items (deprecate old versions / require re-subscription)
 */
@Override
public APIStateChangeResponse changeLifeCycleStatus(String orgId, String uuid, String action,
                                                    Map<String, Boolean> checklist)
        throws APIManagementException, FaultGatewaysException {
    APIStateChangeResponse response = new APIStateChangeResponse();
    try {
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username);
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(this.tenantDomain, true);
        //GenericArtifact apiArtifact = getAPIArtifact(apiIdentifier);
        API api = getLightweightAPIByUUID(uuid, orgId);
        String targetStatus;
        if (api != null) {
            String providerName = api.getId().getProviderName();
            String apiName = api.getId().getApiName();
            String apiContext = api.getContext();
            String apiType = api.getType();//check
            String apiVersion = api.getId().getVersion();
            String currentStatus = api.getStatus();
            int apiId = apiMgtDAO.getAPIID(api.getUuid());
            WorkflowStatus apiWFState = null;
            WorkflowDTO wfDTO = apiMgtDAO.retrieveWorkflowFromInternalReference(Integer.toString(apiId),
                    WorkflowConstants.WF_TYPE_AM_API_STATE);
            if (wfDTO != null) {
                apiWFState = wfDTO.getStatus();
            }
            // if the workflow has started, then executor should not fire again
            if (!WorkflowStatus.CREATED.equals(apiWFState)) {
                try {
                    WorkflowProperties workflowProperties = getAPIManagerConfiguration().getWorkflowProperties();
                    WorkflowExecutor apiStateWFExecutor = WorkflowExecutorFactory.getInstance()
                            .getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_API_STATE);
                    APIStateWorkflowDTO apiStateWorkflow = new APIStateWorkflowDTO();
                    apiStateWorkflow.setApiCurrentState(currentStatus);
                    apiStateWorkflow.setApiLCAction(action);
                    apiStateWorkflow.setApiName(apiName);
                    apiStateWorkflow.setApiContext(apiContext);
                    apiStateWorkflow.setApiType(apiType);
                    apiStateWorkflow.setApiVersion(apiVersion);
                    apiStateWorkflow.setApiProvider(providerName);
                    apiStateWorkflow.setCallbackUrl(workflowProperties.getWorkflowCallbackAPI());
                    apiStateWorkflow.setExternalWorkflowReference(apiStateWFExecutor.generateUUID());
                    apiStateWorkflow.setTenantId(tenantId);
                    apiStateWorkflow.setTenantDomain(this.tenantDomain);
                    apiStateWorkflow.setWorkflowType(WorkflowConstants.WF_TYPE_AM_API_STATE);
                    apiStateWorkflow.setStatus(WorkflowStatus.CREATED);
                    apiStateWorkflow.setCreatedTime(System.currentTimeMillis());
                    apiStateWorkflow.setWorkflowReference(Integer.toString(apiId));
                    apiStateWorkflow.setInvoker(this.username);
                    apiStateWorkflow.setApiUUID(uuid);
                    String workflowDescription = "Pending lifecycle state change action: " + action;
                    apiStateWorkflow.setWorkflowDescription(workflowDescription);
                    WorkflowResponse workflowResponse = apiStateWFExecutor.execute(apiStateWorkflow);
                    response.setWorkflowResponse(workflowResponse);
                } catch (WorkflowException e) {
                    handleException("Failed to execute workflow for life cycle status change : " + e.getMessage(),
                            e);
                }
                // get the workflow state once the executor is executed.
                wfDTO = apiMgtDAO.retrieveWorkflowFromInternalReference(Integer.toString(apiId),
                        WorkflowConstants.WF_TYPE_AM_API_STATE);
                if (wfDTO != null) {
                    apiWFState = wfDTO.getStatus();
                    response.setStateChangeStatus(apiWFState.toString());
                } else {
                    response.setStateChangeStatus(WorkflowStatus.APPROVED.toString());
                }
            }
            // only change the lifecycle if approved
            // apiWFState is null when simple wf executor is used because wf state is not stored in the db.
            if (WorkflowStatus.APPROVED.equals(apiWFState) || apiWFState == null) {
                targetStatus = "";
                //RegistryLCManager.getInstance().getStateForTransition(action);
                //apiArtifact.invokeAction(action, APIConstants.API_LIFE_CYCLE);
                //targetStatus = apiArtifact.getLifecycleState();
                targetStatus = LCManagerFactory.getInstance().getLCManager().getStateForTransition(action);
                apiPersistenceInstance.changeAPILifeCycle(new Organization(orgId), uuid, targetStatus);
                api.setOrganization(orgId);
                changeLifeCycle(api, currentStatus, targetStatus, checklist);
                //Sending Notifications to existing subscribers
                if (APIConstants.PUBLISHED.equals(targetStatus)) {
                    sendEmailNotification(api);
                }
                // if retired Delete Existing Gateway Deployments.
                if (APIConstants.RETIRED.equals(targetStatus)){
                    deleteAPIRevisions(uuid, orgId);
                }
                if (!currentStatus.equalsIgnoreCase(targetStatus)) {
                    apiMgtDAO.recordAPILifeCycleEvent(apiId, currentStatus.toUpperCase(),
                            targetStatus.toUpperCase(), this.username, this.tenantId);
                }
                if (log.isDebugEnabled()) {
                    String logMessage = "API Status changed successfully. API Name: " + api.getId().getApiName()
                            + ", API Version " + api.getId().getVersion() + ", New Status : " + targetStatus;
                    log.debug(logMessage);
                }
                APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(),
                        APIConstants.EventType.API_LIFECYCLE_CHANGE.name(), tenantId, tenantDomain, apiName,
                        apiId, uuid,apiVersion, apiType, apiContext,
                        APIUtil.replaceEmailDomainBack(providerName), targetStatus);
                APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name());
                // Extracting API details for the recommendation system
                if (recommendationEnvironment != null) {
                    RecommenderEventPublisher extractor = new RecommenderDetailsExtractor(api, tenantDomain,
                            APIConstants.ADD_API);
                    Thread recommendationThread = new Thread(extractor);
                    recommendationThread.start();
                }
                return response;
            }
        }
    } catch (APIPersistenceException e) {
        handleException("Error while accessing persistance layer", e);
    } catch (PersistenceException e) {
        handleException("Error while accessing lifecycle information ", e);
    } finally {
        PrivilegedCarbonContext.endTenantFlow();
    }
    return response;
}

// Applies the side effects of a lifecycle transition: validates publish preconditions,
// propagates the new state to the gateways, updates API records, honours the deprecate /
// re-subscribe checklist items, and deprecates older published versions when requested.
private void changeLifeCycle(API api, String currentState, String targetState, Map<String, Boolean> checklist)
        throws APIManagementException, FaultGatewaysException {
    String oldStatus = currentState.toUpperCase();
    String newStatus = (targetState != null) ?
            targetState.toUpperCase() : targetState;
    boolean isCurrentCreatedOrPrototyped = APIConstants.CREATED.equals(oldStatus)
            || APIConstants.PROTOTYPED.equals(oldStatus);
    boolean isStateTransitionToPublished = isCurrentCreatedOrPrototyped && APIConstants.PUBLISHED.equals(newStatus);
    if (newStatus != null) {
        // only allow the executor to be used with default LC states transition
        // check only the newStatus so this executor can be used for LC state change from
        // custom state to default api state
        if (isStateTransitionToPublished) {
            // Publishing requires an endpoint (except WEBSUB) and, for OAuth2-protected APIs, at least one tier.
            Set<Tier> tiers = api.getAvailableTiers();
            String endPoint = api.getEndpointConfig();
            String apiSecurity = api.getApiSecurity();
            boolean isOauthProtected = apiSecurity == null
                    || apiSecurity.contains(APIConstants.DEFAULT_API_SECURITY_OAUTH2);
            if (APIConstants.API_TYPE_WEBSUB.equals(api.getType()) || endPoint != null
                    && endPoint.trim().length() > 0) {
                if (isOauthProtected && (tiers == null || tiers.size() <= 0)) {
                    throw new APIManagementException("Failed to publish service to API store while executing "
                            + "APIExecutor. No Tiers selected");
                }
            } else {
                throw new APIManagementException("Failed to publish service to API store while executing"
                        + " APIExecutor. No endpoint selected");
            }
        }
        // push the state change to gateway
        Map<String, String> failedGateways = propergateAPIStatusChangeToGateways(newStatus, api);
        if (APIConstants.PUBLISHED.equals(newStatus) || !oldStatus.equals(newStatus)) { //TODO has registry access
            //if the API is websocket and if default version is selected, update the other versions
            if (APIConstants.APITransportType.WS.toString().equals(api.getType()) && api.isDefaultVersion()) {
                Set<String> versions = getAPIVersions(api.getId().getProviderName(), api.getId().getName(),
                        api.getOrganization());
                for (String version : versions) {
                    if (version.equals(api.getId().getVersion())) {
                        continue;
                    }
                    String uuid = APIUtil.getUUIDFromIdentifier(
                            new APIIdentifier(api.getId().getProviderName(), api.getId().getName(), version),
                            api.getOrganization());
                    API otherApi = getLightweightAPIByUUID(uuid, api.getOrganization());
                    APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(),
                            APIConstants.EventType.API_UPDATE.name(), tenantId, tenantDomain,
                            otherApi.getId().getApiName(), otherApi.getId().getId(), otherApi.getUuid(),
                            version, api.getType(), otherApi.getContext(), otherApi.getId().getProviderName(),
                            otherApi.getStatus());
                    APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name());
                }
            }
        }
        if (log.isDebugEnabled()) {
            String logMessage = "Publish changed status to the Gateway. API Name: " + api.getId().getApiName()
                    + ", API Version " + api.getId().getVersion() + ", API Context: " + api.getContext()
                    + ", New Status : " + newStatus;
            log.debug(logMessage);
        }
        // update api related information for state change
        updateAPIforStateChange(api, currentState, newStatus, failedGateways);
        if (log.isDebugEnabled()) {
            String logMessage = "API related information successfully updated. API Name: "
                    + api.getId().getApiName() + ", API Version " + api.getId().getVersion()
                    + ", API Context: " + api.getContext() + ", New Status : " + newStatus;
            log.debug(logMessage);
        }
    } else {
        throw new APIManagementException("Invalid Lifecycle status for default APIExecutor :" + targetState);
    }
    boolean deprecateOldVersions = false;
    boolean makeKeysForwardCompatible = true;
    // If the API status is CREATED/PROTOTYPED ,check for check list items of lifecycle
    if (isCurrentCreatedOrPrototyped) {
        if (checklist != null) {
            if(checklist.containsKey(APIConstants.DEPRECATE_CHECK_LIST_ITEM)) {
                deprecateOldVersions = checklist.get(APIConstants.DEPRECATE_CHECK_LIST_ITEM);
            }
            if(checklist.containsKey(APIConstants.RESUBSCRIBE_CHECK_LIST_ITEM)) {
                // RESUBSCRIBE item set means keys are NOT carried forward, hence the negation.
                makeKeysForwardCompatible = !checklist.get(APIConstants.RESUBSCRIBE_CHECK_LIST_ITEM);
            }
        }
    }
    if (isStateTransitionToPublished) {
        if (makeKeysForwardCompatible) {
            makeAPIKeysForwardCompatible(api);
        }
        if (deprecateOldVersions) {
            String provider = APIUtil.replaceEmailDomain(api.getId().getProviderName());
            String apiName = api.getId().getName();
            List<API> apiList = getAPIVersionsByProviderAndName(provider, apiName, api.getOrganization());
            APIVersionComparator versionComparator = new APIVersionComparator();
            for (API oldAPI : apiList) {
                // Deprecate every older, currently-published version of the same API.
                if (oldAPI.getId().getApiName().equals(api.getId().getApiName())
                        && versionComparator.compare(oldAPI, api) < 0
                        && (APIConstants.PUBLISHED.equals(oldAPI.getStatus()))) {
                    changeLifeCycleStatus(tenantDomain, oldAPI.getUuid(),
                            APIConstants.API_LC_ACTION_DEPRECATE, null);
                }
            }
        }
    }
}

// Returns lightweight API objects (uuid + status only) for all versions of the named API,
// skipping API products.
private List<API> getAPIVersionsByProviderAndName(String provider, String apiName, String organization)
        throws APIManagementException {
    Set<String> list = apiMgtDAO.getUUIDsOfAPIVersions(apiName, provider);
    List<API> apiVersions = new ArrayList<API>();
    for (String uuid : list) {
        try {
            PublisherAPI publisherAPI = apiPersistenceInstance
                    .getPublisherAPI(new Organization(organization), uuid);
            if (APIConstants.API_PRODUCT.equals(publisherAPI.getType())) {
                // skip api products
                continue;
            }
            API api = new API(new APIIdentifier(publisherAPI.getProviderName(), publisherAPI.getApiName(),
                    publisherAPI.getVersion()));
            api.setUuid(uuid);
            api.setStatus(publisherAPI.getStatus());
            apiVersions.add(api);
        } catch (APIPersistenceException e) {
            throw new APIManagementException("Error while retrieving the api ", e);
        }
    }
    return apiVersions;
}

/**
 * To get the API artifact from the registry
 *
 * @param apiIdentifier API identifier
 * @return API artifact, if the relevant artifact exists
 * @throws APIManagementException API Management Exception.
 */
protected GenericArtifact getAPIArtifact(APIIdentifier apiIdentifier) throws APIManagementException {
    return APIUtil.getAPIArtifact(apiIdentifier, registry);
}

/**
 * Checks or unchecks a lifecycle checklist item (by index) on the API's governance artifact,
 * switching into the provider's tenant flow when necessary.
 *
 * @return true when the artifact was found and the item state was applied
 */
@Override
public boolean changeAPILCCheckListItems(APIIdentifier apiIdentifier, int checkItem, boolean checkItemValue)
        throws APIManagementException {
    String providerTenantMode = apiIdentifier.getProviderName();
    boolean success = false;
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerTenantMode));
        if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
            isTenantFlowStarted = true;
            PrivilegedCarbonContext.startTenantFlow();
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        }
        GenericArtifact apiArtifact = getAPIArtifact(apiIdentifier);
        String status = null; // NOTE(review): unused local — kept for byte-compatibility.
        try {
            if (apiArtifact != null) {
                // Only flip the item when its current state differs from the requested value.
                if (checkItemValue && !apiArtifact.isLCItemChecked(checkItem, APIConstants.API_LIFE_CYCLE)) {
                    apiArtifact.checkLCItem(checkItem, APIConstants.API_LIFE_CYCLE);
                } else if (!checkItemValue && apiArtifact.isLCItemChecked(checkItem,
                        APIConstants.API_LIFE_CYCLE)) {
                    apiArtifact.uncheckLCItem(checkItem, APIConstants.API_LIFE_CYCLE);
                }
                success = true;
            }
        } catch (GovernanceException e) {
            handleException("Error while setting registry lifecycle checklist items for the API: "
                    + apiIdentifier.getApiName(), e);
        }
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    return success;
}

/**
 * This method is to set a lifecycle check list item given the APIIdentifier and the checklist item name.
 * If the given item not in the allowed lifecycle check items list or item is already checked, this will stay
 * silent and return false. Otherwise, the checklist item will be updated and returns true.
 *
 * @param apiIdentifier APIIdentifier
 * @param checkItemName Name of the checklist item
 * @param checkItemValue Value to be set to the checklist item
 * @return boolean value representing success not not
 * @throws APIManagementException
 */
@Override
public boolean checkAndChangeAPILCCheckListItem(APIIdentifier apiIdentifier, String checkItemName,
                                                boolean checkItemValue) throws APIManagementException {
    Map<String, Object> lifeCycleData = getAPILifeCycleData(apiIdentifier);
    if (lifeCycleData != null && lifeCycleData.get(APIConstants.LC_CHECK_ITEMS) != null && lifeCycleData
            .get(APIConstants.LC_CHECK_ITEMS) instanceof ArrayList) {
        List checkListItems = (List) lifeCycleData.get(APIConstants.LC_CHECK_ITEMS);
        for (Object item : checkListItems) {
            if (item instanceof CheckListItem) {
                CheckListItem checkListItem = (CheckListItem) item;
                // The item's "order" is its index in the lifecycle checklist.
                int index = Integer.parseInt(checkListItem.getOrder());
                if (checkListItem.getName().equals(checkItemName)) {
                    changeAPILCCheckListItems(apiIdentifier, index, checkItemValue);
                    return true;
                }
            }
        }
    }
    return false;
}

@Override
/*
 * This method returns the lifecycle data for an API including current state,next states.
* * @param apiId APIIdentifier * @return Map<String,Object> a map with lifecycle data */ public Map<String, Object> getAPILifeCycleData(APIIdentifier apiId) throws APIManagementException { String path = APIUtil.getAPIPath(apiId); Map<String, Object> lcData = new HashMap<String, Object>(); String providerTenantMode = apiId.getProviderName(); boolean isTenantFlowStarted = false; try { String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerTenantMode)); if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { isTenantFlowStarted = true; PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } Resource apiSourceArtifact = registry.get(path); GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); if (artifactManager == null) { String errorMessage = "Failed to retrieve artifact manager when getting lifecycle data for API " + apiId; log.error(errorMessage); throw new APIManagementException(errorMessage); } GenericArtifact artifact = artifactManager.getGenericArtifact( apiSourceArtifact.getUUID()); //Get all the actions corresponding to current state of the api artifact String[] actions = artifact.getAllLifecycleActions(APIConstants.API_LIFE_CYCLE); //Put next states into map lcData.put(APIConstants.LC_NEXT_STATES, actions); String lifeCycleState = artifact.getLifecycleState(); lcData.put(APIConstants.LC_STATUS, lifeCycleState); LifecycleBean bean; bean = LifecycleBeanPopulator.getLifecycleBean(path, (UserRegistry) registry, configRegistry); if (bean != null) { ArrayList<CheckListItem> checkListItems = new ArrayList<CheckListItem>(); ArrayList<String> permissionList = new ArrayList<String>(); //Get lc properties Property[] lifecycleProps = bean.getLifecycleProperties(); //Get roles of the current session holder String[] roleNames = bean.getRolesOfUser(); for (Property 
property : lifecycleProps) { String propName = property.getKey(); String[] propValues = property.getValues(); //Check for permission properties if any exists if (propValues != null && propValues.length != 0) { if (propName.startsWith(APIConstants.LC_PROPERTY_CHECKLIST_PREFIX) && propName.endsWith(APIConstants.LC_PROPERTY_PERMISSION_SUFFIX) && propName.contains(APIConstants.API_LIFE_CYCLE)) { for (String role : roleNames) { for (String propValue : propValues) { String key = propName.replace(APIConstants.LC_PROPERTY_CHECKLIST_PREFIX, "") .replace(APIConstants.LC_PROPERTY_PERMISSION_SUFFIX, ""); if (propValue.equals(role)) { permissionList.add(key); } else if (propValue.startsWith(APIConstants.LC_PROPERTY_CHECKLIST_PREFIX) && propValue.endsWith(APIConstants.LC_PROPERTY_PERMISSION_SUFFIX)) { permissionList.add(key); } } } } } } //Check for lifecycle checklist item properties defined for (Property property : lifecycleProps) { String propName = property.getKey(); String[] propValues = property.getValues(); if (propValues != null && propValues.length != 0) { CheckListItem checkListItem = new CheckListItem(); checkListItem.setVisible("false"); if (propName.startsWith(APIConstants.LC_PROPERTY_CHECKLIST_PREFIX) && propName.endsWith(APIConstants.LC_PROPERTY_ITEM_SUFFIX) && propName.contains(APIConstants.API_LIFE_CYCLE)) { if (propValues.length > 2) { for (String param : propValues) { if (param.startsWith(APIConstants.LC_STATUS)) { checkListItem.setLifeCycleStatus(param.substring(7)); } else if (param.startsWith(APIConstants.LC_CHECK_ITEM_NAME)) { checkListItem.setName(param.substring(5)); } else if (param.startsWith(APIConstants.LC_CHECK_ITEM_VALUE)) { checkListItem.setValue(param.substring(6)); } else if (param.startsWith(APIConstants.LC_CHECK_ITEM_ORDER)) { checkListItem.setOrder(param.substring(6)); } } } String key = propName.replace(APIConstants.LC_PROPERTY_CHECKLIST_PREFIX, ""). 
replace(APIConstants.LC_PROPERTY_ITEM_SUFFIX, ""); if (permissionList.contains(key)) { //Set visible to true if the checklist item permits checkListItem.setVisible("true"); } } if (checkListItem.matchLifeCycleStatus(lifeCycleState)) { checkListItems.add(checkListItem); } } } lcData.put("items", checkListItems); } } catch (Exception e) { handleException(e.getMessage(), e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } return lcData; } public Map<String, Object> getAPILifeCycleData(String uuid, String orgId) throws APIManagementException { Map<String, Object> lcData = new HashMap<String, Object>(); API api = getLightweightAPIByUUID(uuid, orgId); List<String> actionsList; try { actionsList = LCManagerFactory.getInstance().getLCManager().getAllowedActionsForState(api.getStatus()); if (actionsList != null) { String[] actionsArray = new String[actionsList.size()]; actionsArray = actionsList.toArray(actionsArray); lcData.put(APIConstants.LC_NEXT_STATES, actionsArray); } ArrayList<CheckListItem> checkListItems = new ArrayList<CheckListItem>(); List<String> checklistItemsList = LCManagerFactory.getInstance().getLCManager() .getCheckListItemsForState(api.getStatus()); if (checklistItemsList != null) { for (String name : checklistItemsList) { CheckListItem item = new CheckListItem(); item.setName(name); item.setValue("false"); checkListItems.add(item); } } lcData.put("items", checkListItems); } catch (PersistenceException e) { throw new APIManagementException("Error while parsing the lifecycle ", e); } String status = api.getStatus(); status = status.substring(0, 1).toUpperCase() + status.substring(1).toLowerCase(); // First letter capital lcData.put(APIConstants.LC_STATUS, status); return lcData; } @Override public String getAPILifeCycleStatus(APIIdentifier apiIdentifier) throws APIManagementException { try { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username); 
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(this.tenantDomain, true); GenericArtifact apiArtifact = APIUtil.getAPIArtifact(apiIdentifier, registry); if (apiArtifact == null) { String errorMessage = "API artifact is null when retrieving lifecycle status of API " + apiIdentifier.getApiName(); log.error(errorMessage); throw new APIManagementException(errorMessage); } return apiArtifact.getLifecycleState(); } catch (GovernanceException e) { handleException("Failed to get the life cycle status : " + e.getMessage(), e); return null; } finally { PrivilegedCarbonContext.endTenantFlow(); } } @Override public Map<String, Object> getAllPaginatedAPIs(String tenantDomain, int start, int end) throws APIManagementException { Map<String, Object> result = new HashMap<String, Object>(); List<API> apiSortedList = new ArrayList<API>(); int totalLength = 0; boolean isTenantFlowStarted = false; try { String paginationLimit = getAPIManagerConfiguration() .getFirstProperty(APIConstants.API_PUBLISHER_APIS_PER_PAGE); // If the Config exists use it to set the pagination limit final int maxPaginationLimit; if (paginationLimit != null) { // The additional 1 added to the maxPaginationLimit is to help us determine if more // APIs may exist so that we know that we are unable to determine the actual total // API count. 
We will subtract this 1 later on so that it does not interfere with // the logic of the rest of the application int pagination = Integer.parseInt(paginationLimit); // Because the store jaggery pagination logic is 10 results per a page we need to set pagination // limit to at least 11 or the pagination done at this level will conflict with the store pagination // leading to some of the APIs not being displayed if (pagination < 11) { pagination = 11; log.warn( "Value of '" + APIConstants.API_PUBLISHER_APIS_PER_PAGE + "' is too low, defaulting to 11"); } maxPaginationLimit = start + pagination + 1; } // Else if the config is not specifed we go with default functionality and load all else { maxPaginationLimit = Integer.MAX_VALUE; } Registry userRegistry; boolean isTenantMode = (tenantDomain != null); if ((isTenantMode && this.tenantDomain == null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) { if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); isTenantFlowStarted = true; } int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomain); APIUtil.loadTenantRegistry(tenantId); userRegistry = ServiceReferenceHolder.getInstance(). 
getRegistryService().getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantId); PrivilegedCarbonContext.getThreadLocalCarbonContext() .setUsername(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME); } else { userRegistry = registry; PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username); } PaginationContext.init(start, end, "ASC", APIConstants.PROVIDER_OVERVIEW_NAME, maxPaginationLimit); GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY); if (artifactManager != null) { List<GovernanceArtifact> genericArtifacts = null; if (isAccessControlRestrictionEnabled && !APIUtil.hasPermission(userNameWithoutChange, APIConstants .Permissions.APIM_ADMIN)) { genericArtifacts = GovernanceUtils.findGovernanceArtifacts(getUserRoleListQuery(), userRegistry, APIConstants.API_RXT_MEDIA_TYPE, true); } else { genericArtifacts = GovernanceUtils .findGovernanceArtifacts(new HashMap<String, List<String>>(), userRegistry, APIConstants.API_RXT_MEDIA_TYPE); } totalLength = PaginationContext.getInstance().getLength(); if (genericArtifacts == null || genericArtifacts.isEmpty()) { result.put("apis", apiSortedList); result.put("totalLength", totalLength); return result; } // Check to see if we can speculate that there are more APIs to be loaded if (maxPaginationLimit == totalLength) { // performance hit --totalLength; // Remove the additional 1 we added earlier when setting max pagination limit } int tempLength = 0; for (GovernanceArtifact artifact : genericArtifacts) { API api = APIUtil.getAPI(artifact); if (api != null) { apiSortedList.add(api); } tempLength++; if (tempLength >= totalLength) { break; } } Collections.sort(apiSortedList, new APINameComparator()); } else { String errorMessage = "Failed to retrieve artifact manager when getting paginated APIs of tenant " + tenantDomain; log.error(errorMessage); throw new APIManagementException(errorMessage); } } catch (RegistryException e) { 
handleException("Failed to get all APIs", e); } catch (UserStoreException e) { handleException("Failed to get all APIs", e); } finally { PaginationContext.destroy(); if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } result.put("apis", apiSortedList); result.put("totalLength", totalLength); return result; } private boolean isTenantDomainNotMatching(String tenantDomain) { if (this.tenantDomain != null) { return !(this.tenantDomain.equals(tenantDomain)); } return true; } /** * Deploy policy to global CEP and persist the policy object * * @param policy policy object */ public void addPolicy(Policy policy) throws APIManagementException { if (policy instanceof APIPolicy) { APIPolicy apiPolicy = (APIPolicy) policy; //Check if there's a policy exists before adding the new policy Policy existingPolicy = getAPIPolicy(userNameWithoutChange, apiPolicy.getPolicyName()); if (existingPolicy != null) { handleException("Advanced Policy with name " + apiPolicy.getPolicyName() + " already exists"); } apiPolicy.setUserLevel(PolicyConstants.ACROSS_ALL); apiPolicy = apiMgtDAO.addAPIPolicy(apiPolicy); List<Integer> addedConditionGroupIds = new ArrayList<>(); for (Pipeline pipeline : apiPolicy.getPipelines()) { addedConditionGroupIds.add(pipeline.getId()); } APIPolicyEvent apiPolicyEvent = new APIPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_CREATE.name(), tenantId, apiPolicy.getTenantDomain(), apiPolicy.getPolicyId(), apiPolicy.getPolicyName(), apiPolicy.getDefaultQuotaPolicy().getType(), addedConditionGroupIds, null); APIUtil.sendNotification(apiPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else if (policy instanceof ApplicationPolicy) { ApplicationPolicy appPolicy = (ApplicationPolicy) policy; //Check if there's a policy exists before adding the new policy Policy existingPolicy = getApplicationPolicy(userNameWithoutChange, appPolicy.getPolicyName()); if (existingPolicy != null) { 
handleException("Application Policy with name " + appPolicy.getPolicyName() + " already exists"); } apiMgtDAO.addApplicationPolicy(appPolicy); //policy id is not set. retrieving policy to get the id. ApplicationPolicy retrievedPolicy = apiMgtDAO.getApplicationPolicy(appPolicy.getPolicyName(), tenantId); ApplicationPolicyEvent applicationPolicyEvent = new ApplicationPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_CREATE.name(), tenantId, appPolicy.getTenantDomain(), retrievedPolicy.getPolicyId(), appPolicy.getPolicyName(), appPolicy.getDefaultQuotaPolicy().getType()); APIUtil.sendNotification(applicationPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else if (policy instanceof SubscriptionPolicy) { SubscriptionPolicy subPolicy = (SubscriptionPolicy) policy; //Check if there's a policy exists before adding the new policy Policy existingPolicy = getSubscriptionPolicy(userNameWithoutChange, subPolicy.getPolicyName()); if (existingPolicy != null) { handleException("Subscription Policy with name " + subPolicy.getPolicyName() + " already exists"); } apiMgtDAO.addSubscriptionPolicy(subPolicy); String monetizationPlan = subPolicy.getMonetizationPlan(); Map<String, String> monetizationPlanProperties = subPolicy.getMonetizationPlanProperties(); if (StringUtils.isNotBlank(monetizationPlan) && MapUtils.isNotEmpty(monetizationPlanProperties)) { createMonetizationPlan(subPolicy); } //policy id is not set. retrieving policy to get the id. 
SubscriptionPolicy retrievedPolicy = apiMgtDAO.getSubscriptionPolicy(subPolicy.getPolicyName(), tenantId); SubscriptionPolicyEvent subscriptionPolicyEvent = new SubscriptionPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_CREATE.name(), tenantId, subPolicy.getTenantDomain(), retrievedPolicy.getPolicyId(), subPolicy.getPolicyName(), subPolicy.getDefaultQuotaPolicy().getType(), subPolicy.getRateLimitCount(),subPolicy.getRateLimitTimeUnit(), subPolicy.isStopOnQuotaReach(), subPolicy.getGraphQLMaxDepth(),subPolicy.getGraphQLMaxComplexity(),subPolicy.getSubscriberCount()); APIUtil.sendNotification(subscriptionPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else if (policy instanceof GlobalPolicy) { GlobalPolicy globalPolicy = (GlobalPolicy) policy; // checking if policy already exist Policy existingPolicy = getGlobalPolicy(globalPolicy.getPolicyName()); if (existingPolicy != null) { throw new APIManagementException("Policy name already exists"); } apiMgtDAO.addGlobalPolicy(globalPolicy); publishKeyTemplateEvent(globalPolicy.getKeyTemplate(), "add"); GlobalPolicy retrievedPolicy = apiMgtDAO.getGlobalPolicy(globalPolicy.getPolicyName()); GlobalPolicyEvent globalPolicyEvent = new GlobalPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_CREATE.name(), tenantId, globalPolicy.getTenantDomain(), retrievedPolicy.getPolicyId(), globalPolicy.getPolicyName()); APIUtil.sendNotification(globalPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else { String msg = "Policy type " + policy.getClass().getName() + " is not supported"; log.error(msg); throw new UnsupportedPolicyTypeException(msg); } } @Override public void configureMonetizationInAPIArtifact(API api) throws APIManagementException { Organization org = new Organization(api.getOrganization()); try { apiPersistenceInstance.updateAPI(org, APIMapper.INSTANCE.toPublisherApi(api)); } catch (APIPersistenceException e) { 
throw new APIManagementException("Error while updating API details", e);
        }
    }

    /**
     * Persists the monetization settings of the given API product into its registry artifact:
     * the enabled/disabled status and any additional monetization properties.
     * The whole update runs inside a registry transaction; on any failure the transaction is
     * rolled back and an APIManagementException is raised via handleException.
     *
     * @param apiProduct API product whose monetization data should be written to the registry
     * @throws APIManagementException on registry errors or when the artifact manager is unavailable
     */
    @Override
    public void configureMonetizationInAPIProductArtifact(APIProduct apiProduct) throws APIManagementException {
        boolean transactionCommitted = false;
        try {
            registry.beginTransaction();
            // NOTE(review): apiArtifactId is computed but never read again in this method — confirm
            // whether the registry.get() call is needed only for its side effect (existence check).
            String apiArtifactId = registry.get(APIUtil.getAPIProductPath(apiProduct.getId())).getId();
            GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
            if (artifactManager == null) {
                // presumably handleException throws, so the null manager never reaches the lookup
                // below — TODO confirm; otherwise this would NPE on getGenericArtifact.
                handleException("Artifact manager is null when updating monetization data for API ID " + apiProduct.getId());
            }
            GenericArtifact artifact = artifactManager.getGenericArtifact(apiProduct.getUuid());
            //set monetization status (i.e - enabled or disabled)
            artifact.setAttribute(APIConstants.Monetization.API_MONETIZATION_STATUS,
                    Boolean.toString(apiProduct.getMonetizationStatus()));
            //clear existing monetization properties
            artifact.removeAttribute(APIConstants.Monetization.API_MONETIZATION_PROPERTIES);
            //set new additional monetization data
            if (apiProduct.getMonetizationProperties() != null) {
                artifact.setAttribute(APIConstants.Monetization.API_MONETIZATION_PROPERTIES,
                        apiProduct.getMonetizationProperties().toJSONString());
            }
            artifactManager.updateGenericArtifact(artifact);
            registry.commitTransaction();
            transactionCommitted = true;
        } catch (Exception e) {
            try {
                registry.rollbackTransaction();
            } catch (RegistryException re) {
                handleException("Error while rolling back the transaction (monetization status update) for API product : "
                        + apiProduct.getId().getName(), re);
            }
            handleException("Error while performing registry transaction (monetization status update) operation", e);
        } finally {
            try {
                // NOTE(review): on the failure path the catch block above has already rolled back,
                // so this can attempt a second rollback — confirm the registry tolerates that
                // before restructuring.
                if (!transactionCommitted) {
                    registry.rollbackTransaction();
                }
            } catch (RegistryException e) {
                handleException("Error occurred while rolling back the transaction (monetization status update).", e);
            }
        }
    }

    /**
     * This methods creates a monetization plan for a given subscription policy
     *
     * @param subPolicy
subscription policy * @return true if successful, false otherwise * @throws APIManagementException if failed to create a monetization plan */ private boolean createMonetizationPlan(SubscriptionPolicy subPolicy) throws APIManagementException { Monetization monetizationImplementation = getMonetizationImplClass(); if (monetizationImplementation != null) { try { return monetizationImplementation.createBillingPlan(subPolicy); } catch (MonetizationException e) { APIUtil.handleException("Failed to create monetization plan for : " + subPolicy.getPolicyName(), e); } } return false; } /** * This methods updates the monetization plan for a given subscription policy * * @param subPolicy subscription policy * @return true if successful, false otherwise * @throws APIManagementException if failed to update the plan */ private boolean updateMonetizationPlan(SubscriptionPolicy subPolicy) throws APIManagementException { Monetization monetizationImplementation = getMonetizationImplClass(); if (monetizationImplementation != null) { try { return monetizationImplementation.updateBillingPlan(subPolicy); } catch (MonetizationException e) { APIUtil.handleException("Failed to update monetization plan for : " + subPolicy.getPolicyName(), e); } } return false; } /** * This methods delete the monetization plan for a given subscription policy * * @param subPolicy subscription policy * @return true if successful, false otherwise * @throws APIManagementException if failed to delete the plan */ private boolean deleteMonetizationPlan(SubscriptionPolicy subPolicy) throws APIManagementException { Monetization monetizationImplementation = getMonetizationImplClass(); if (monetizationImplementation != null) { try { return monetizationImplementation.deleteBillingPlan(subPolicy); } catch (MonetizationException e) { APIUtil.handleException("Failed to delete monetization plan of : " + subPolicy.getPolicyName(), e); } } return false; } /** * This methods loads the monetization implementation class * * 
@return monetization implementation class * @throws APIManagementException if failed to load monetization implementation class */ public Monetization getMonetizationImplClass() throws APIManagementException { APIManagerConfiguration configuration = org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder. getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration(); Monetization monetizationImpl = null; if (configuration == null) { log.error("API Manager configuration is not initialized."); } else { String monetizationImplClass = configuration.getFirstProperty(APIConstants.Monetization.MONETIZATION_IMPL); if (monetizationImplClass == null) { monetizationImpl = new DefaultMonetizationImpl(); } else { try { monetizationImpl = (Monetization) APIUtil.getClassInstance(monetizationImplClass); } catch (ClassNotFoundException | IllegalAccessException | InstantiationException e) { APIUtil.handleException("Failed to load monetization implementation class.", e); } } } return monetizationImpl; } public void updatePolicy(Policy policy) throws APIManagementException { String oldKeyTemplate = null; String newKeyTemplate = null; if (policy instanceof APIPolicy) { APIPolicy apiPolicy = (APIPolicy) policy; apiPolicy.setUserLevel(PolicyConstants.ACROSS_ALL); //TODO this has done due to update policy method not deleting the second level entries when delete on cascade //TODO Need to fix appropriately List<Pipeline> pipelineList = apiPolicy.getPipelines(); if (pipelineList != null && pipelineList.size() != 0) { Iterator<Pipeline> pipelineIterator = pipelineList.iterator(); while (pipelineIterator.hasNext()) { Pipeline pipeline = pipelineIterator.next(); if (!pipeline.isEnabled()) { pipelineIterator.remove(); } else { if (pipeline.getConditions() != null && pipeline.getConditions().size() != 0) { Iterator<Condition> conditionIterator = pipeline.getConditions().iterator(); while (conditionIterator.hasNext()) { Condition condition = conditionIterator.next(); if 
(JavaUtils.isFalseExplicitly(condition.getConditionEnabled())) { conditionIterator.remove(); } } } else { pipelineIterator.remove(); } } } } APIPolicy existingPolicy = apiMgtDAO.getAPIPolicy(policy.getPolicyName(), policy.getTenantId()); apiPolicy = apiMgtDAO.updateAPIPolicy(apiPolicy); //TODO rename level to resource or appropriate name APIManagerConfiguration config = getAPIManagerConfiguration(); if (log.isDebugEnabled()) { log.debug("Calling invalidation cache for API Policy for tenant "); } String policyContext = APIConstants.POLICY_CACHE_CONTEXT + "/t/" + apiPolicy.getTenantDomain() + "/"; invalidateResourceCache(policyContext, null, Collections.EMPTY_SET); List<Integer> addedConditionGroupIds = new ArrayList<>(); List<Integer> deletedConditionGroupIds = new ArrayList<>(); for (Pipeline pipeline : existingPolicy.getPipelines()) { deletedConditionGroupIds.add(pipeline.getId()); } for (Pipeline pipeline : apiPolicy.getPipelines()) { addedConditionGroupIds.add(pipeline.getId()); } APIPolicyEvent apiPolicyEvent = new APIPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_UPDATE.name(), tenantId, apiPolicy.getTenantDomain(), apiPolicy.getPolicyId(), apiPolicy.getPolicyName(), apiPolicy.getDefaultQuotaPolicy().getType(), addedConditionGroupIds, deletedConditionGroupIds); APIUtil.sendNotification(apiPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else if (policy instanceof ApplicationPolicy) { ApplicationPolicy appPolicy = (ApplicationPolicy) policy; apiMgtDAO.updateApplicationPolicy(appPolicy); //policy id is not set. retrieving policy to get the id. 
ApplicationPolicy retrievedPolicy = apiMgtDAO.getApplicationPolicy(appPolicy.getPolicyName(), tenantId); ApplicationPolicyEvent applicationPolicyEvent = new ApplicationPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_UPDATE.name(), tenantId, appPolicy.getTenantDomain(), retrievedPolicy.getPolicyId(), appPolicy.getPolicyName(), appPolicy.getDefaultQuotaPolicy().getType()); APIUtil.sendNotification(applicationPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else if (policy instanceof SubscriptionPolicy) { SubscriptionPolicy subPolicy = (SubscriptionPolicy) policy; apiMgtDAO.updateSubscriptionPolicy(subPolicy); String monetizationPlan = subPolicy.getMonetizationPlan(); Map<String, String> monetizationPlanProperties = subPolicy.getMonetizationPlanProperties(); //call the monetization extension point to create plans (if any) if (StringUtils.isNotBlank(monetizationPlan) && MapUtils.isNotEmpty(monetizationPlanProperties)) { updateMonetizationPlan(subPolicy); } //policy id is not set. retrieving policy to get the id. 
SubscriptionPolicy retrievedPolicy = apiMgtDAO.getSubscriptionPolicy(subPolicy.getPolicyName(), tenantId); SubscriptionPolicyEvent subscriptionPolicyEvent = new SubscriptionPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_UPDATE.name(), tenantId,subPolicy.getTenantDomain(), retrievedPolicy.getPolicyId(), subPolicy.getPolicyName(), subPolicy.getDefaultQuotaPolicy().getType(), subPolicy.getRateLimitCount(),subPolicy.getRateLimitTimeUnit(), subPolicy.isStopOnQuotaReach(),subPolicy.getGraphQLMaxDepth(), subPolicy.getGraphQLMaxComplexity(), subPolicy.getSubscriberCount()); APIUtil.sendNotification(subscriptionPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else if (policy instanceof GlobalPolicy) { GlobalPolicy globalPolicy = (GlobalPolicy) policy; // getting key templates before updating database GlobalPolicy oldGlobalPolicy = apiMgtDAO.getGlobalPolicy(policy.getPolicyName()); oldKeyTemplate = oldGlobalPolicy.getKeyTemplate(); newKeyTemplate = globalPolicy.getKeyTemplate(); apiMgtDAO.updateGlobalPolicy(globalPolicy); GlobalPolicy retrievedPolicy = apiMgtDAO.getGlobalPolicy(globalPolicy.getPolicyName()); GlobalPolicyEvent globalPolicyEvent = new GlobalPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_UPDATE.name(), tenantId, globalPolicy.getTenantDomain(), retrievedPolicy.getPolicyId(), globalPolicy.getPolicyName()); APIUtil.sendNotification(globalPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else { String msg = "Policy type " + policy.getClass().getName() + " is not supported"; log.error(msg); throw new UnsupportedPolicyTypeException(msg); } //publishing keytemplate after update if (oldKeyTemplate != null && newKeyTemplate != null) { publishKeyTemplateEvent(oldKeyTemplate, "remove"); publishKeyTemplateEvent(newKeyTemplate, "add"); } } /** * @param username username to recognize tenant * @param level policy level to be applied * @return * @throws 
APIManagementException */ public String[] getPolicyNames(String username, String level) throws APIManagementException { String[] policyNames = apiMgtDAO.getPolicyNames(level, username); return policyNames; } /** * @param username username to recognize the tenant * @param policyLevel policy level * @param policyName name of the policy to be deleted * @throws APIManagementException */ public void deletePolicy(String username, String policyLevel, String policyName) throws APIManagementException { int tenantID = APIUtil.getTenantId(username); if (PolicyConstants.POLICY_LEVEL_API.equals(policyLevel)) { //need to load whole policy object to get the pipelines APIPolicy policy = apiMgtDAO.getAPIPolicy(policyName, APIUtil.getTenantId(username)); List<Integer> deletedConditionGroupIds = new ArrayList<>(); for (Pipeline pipeline : policy.getPipelines()) { deletedConditionGroupIds.add(pipeline.getId()); } APIPolicyEvent apiPolicyEvent = new APIPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_DELETE.name(), tenantId, policy.getTenantDomain(), policy.getPolicyId(), policy.getPolicyName(), policy.getDefaultQuotaPolicy().getType(), null, deletedConditionGroupIds); APIUtil.sendNotification(apiPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else if (PolicyConstants.POLICY_LEVEL_APP.equals(policyLevel)) { ApplicationPolicy appPolicy = apiMgtDAO.getApplicationPolicy(policyName, tenantID); ApplicationPolicyEvent applicationPolicyEvent = new ApplicationPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_DELETE.name(), tenantId, appPolicy.getTenantDomain(), appPolicy.getPolicyId(), appPolicy.getPolicyName(), appPolicy.getDefaultQuotaPolicy().getType()); APIUtil.sendNotification(applicationPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else if (PolicyConstants.POLICY_LEVEL_SUB.equals(policyLevel)) { SubscriptionPolicy subscriptionPolicy = 
apiMgtDAO.getSubscriptionPolicy(policyName, tenantID); //call the monetization extension point to delete plans if any deleteMonetizationPlan(subscriptionPolicy); SubscriptionPolicyEvent subscriptionPolicyEvent = new SubscriptionPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_DELETE.name(), tenantId, subscriptionPolicy.getTenantDomain(), subscriptionPolicy.getPolicyId(), subscriptionPolicy.getPolicyName(), subscriptionPolicy.getDefaultQuotaPolicy().getType(), subscriptionPolicy.getRateLimitCount(), subscriptionPolicy.getRateLimitTimeUnit(), subscriptionPolicy.isStopOnQuotaReach(), subscriptionPolicy.getGraphQLMaxDepth(), subscriptionPolicy.getGraphQLMaxComplexity(), subscriptionPolicy.getSubscriberCount()); APIUtil.sendNotification(subscriptionPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else if (PolicyConstants.POLICY_LEVEL_GLOBAL.equals(policyLevel)) { GlobalPolicy globalPolicy = apiMgtDAO.getGlobalPolicy(policyName); GlobalPolicyEvent globalPolicyEvent = new GlobalPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_DELETE.name(), tenantId, globalPolicy.getTenantDomain(), globalPolicy.getPolicyId(), globalPolicy.getPolicyName()); APIUtil.sendNotification(globalPolicyEvent, APIConstants.NotifierType.POLICY.name()); } GlobalPolicy globalPolicy = null; if (PolicyConstants.POLICY_LEVEL_GLOBAL.equals(policyLevel)) { globalPolicy = apiMgtDAO.getGlobalPolicy(policyName); } //remove from database apiMgtDAO.removeThrottlePolicy(policyLevel, policyName, tenantID); if (globalPolicy != null) { publishKeyTemplateEvent(globalPolicy.getKeyTemplate(), "remove"); } } /** * Returns true if key template given by the global policy already exists. 
     * But this check will exclude the policy represented by the policy name
     *
     * @param policy Global policy
     * @return true if Global policy key template already exists
     */
    public boolean isGlobalPolicyKeyTemplateExists(GlobalPolicy policy) throws APIManagementException {
        return apiMgtDAO.isKeyTemplatesExist(policy);
    }

    /**
     * Checks whether the given policy has at least one subscription attached to it.
     *
     * @param username   username used to resolve the tenant
     * @param policyName name of the policy to check
     * @param policyType policy type passed through to the DAO lookup
     * @return true if a subscription referencing this policy exists
     */
    public boolean hasAttachments(String username, String policyName, String policyType) throws APIManagementException {
        int tenantID = APIUtil.getTenantId(username);
        String tenantDomain = MultitenantUtils.getTenantDomain(username);
        // NOTE(review): for non-super tenants the username part is discarded entirely and only
        // "@<tenantDomain>" is passed to the DAO — presumably the DAO does a suffix match; confirm.
        String tenantDomainWithAt = username;
        if (APIUtil.getSuperTenantId() != tenantID) {
            tenantDomainWithAt = "@" + tenantDomain;
        }
        boolean hasSubscription = apiMgtDAO.hasSubscription(policyName, tenantDomainWithAt, policyType);
        return hasSubscription;
    }

    /**
     * Lists all block conditions of the current tenant (taken from the {@code tenantDomain} field).
     */
    @Override
    public List<BlockConditionsDTO> getBlockConditions() throws APIManagementException {
        return apiMgtDAO.getBlockConditions(tenantDomain);
    }

    /**
     * Fetches a single block condition by its numeric id; may return null if not found (DAO-dependent).
     */
    @Override
    public BlockConditionsDTO getBlockCondition(int conditionId) throws APIManagementException {
        return apiMgtDAO.getBlockCondition(conditionId);
    }

    /**
     * Fetches a block condition by UUID, raising a not-found error when it does not exist.
     */
    @Override
    public BlockConditionsDTO getBlockConditionByUUID(String uuid) throws APIManagementException {
        BlockConditionsDTO blockCondition = apiMgtDAO.getBlockConditionByUUID(uuid);
        if (blockCondition == null) {
            handleBlockConditionNotFoundException("Block condition: " + uuid + " was not found.");
        }
        return blockCondition;
    }

    /**
     * Updates the enabled/disabled state of a block condition (by id) and, on success,
     * publishes the change to the traffic manager.
     */
    @Override
    public boolean updateBlockCondition(int conditionId, String state) throws APIManagementException {
        boolean updateState = apiMgtDAO.updateBlockConditionState(conditionId, state);
        BlockConditionsDTO blockConditionsDTO = apiMgtDAO.getBlockCondition(conditionId);
        if (updateState) {
            publishBlockingEventUpdate(blockConditionsDTO);
        }
        return updateState;
    }

    /**
     * Updates the enabled/disabled state of a block condition (by UUID) and, on success,
     * publishes the change to the traffic manager.
     */
    @Override
    public boolean updateBlockConditionByUUID(String uuid, String state) throws APIManagementException {
        boolean updateState = apiMgtDAO.updateBlockConditionStateByUUID(uuid, state);
        BlockConditionsDTO blockConditionsDTO = apiMgtDAO.getBlockConditionByUUID(uuid);
        if (updateState && blockConditionsDTO != null) {
            publishBlockingEventUpdate(blockConditionsDTO);
        }
        return updateState;
    }

    /**
     * Creates an enabled block condition for the current tenant and publishes it.
     * USER conditions are normalized to "tenant-aware-username@tenantDomain" before storing.
     *
     * @return UUID of the newly created block condition
     */
    @Override
    public String addBlockCondition(String conditionType, String conditionValue) throws APIManagementException {
        if (APIConstants.BLOCKING_CONDITIONS_USER.equals(conditionType)) {
            conditionValue = MultitenantUtils.getTenantAwareUsername(conditionValue);
            conditionValue = conditionValue + "@" + tenantDomain;
        }
        BlockConditionsDTO blockConditionsDTO = new BlockConditionsDTO();
        blockConditionsDTO.setConditionType(conditionType);
        blockConditionsDTO.setConditionValue(conditionValue);
        blockConditionsDTO.setTenantDomain(tenantDomain);
        blockConditionsDTO.setEnabled(true);
        blockConditionsDTO.setUUID(UUID.randomUUID().toString());
        BlockConditionsDTO createdBlockConditionsDto = apiMgtDAO.addBlockConditions(blockConditionsDTO);
        if (createdBlockConditionsDto != null) {
            publishBlockingEvent(createdBlockConditionsDto, "true");
        }
        return createdBlockConditionsDto.getUUID();
    }

    /**
     * Creates a block condition with an explicit enabled/disabled status.
     *
     * @param conditionStatus initial enabled state persisted on the condition
     * @return UUID of the newly created block condition
     */
    @Override
    public String addBlockCondition(String conditionType, String conditionValue, boolean conditionStatus)
            throws APIManagementException {
        if (APIConstants.BLOCKING_CONDITIONS_USER.equals(conditionType)) {
            conditionValue = MultitenantUtils.getTenantAwareUsername(conditionValue);
            conditionValue = conditionValue + "@" + tenantDomain;
        }
        BlockConditionsDTO blockConditionsDTO = new BlockConditionsDTO();
        blockConditionsDTO.setConditionType(conditionType);
        blockConditionsDTO.setConditionValue(conditionValue);
        blockConditionsDTO.setTenantDomain(tenantDomain);
        blockConditionsDTO.setEnabled(conditionStatus);
        blockConditionsDTO.setUUID(UUID.randomUUID().toString());
        BlockConditionsDTO createdBlockConditionsDto = apiMgtDAO.addBlockConditions(blockConditionsDTO);
        if (createdBlockConditionsDto != null) {
            // NOTE(review): the event always carries state "true" even when conditionStatus is
            // false, unlike the persisted enabled flag — confirm this is intended.
            publishBlockingEvent(createdBlockConditionsDto, "true");
        }
        return createdBlockConditionsDto.getUUID();
    }

    /**
     * Deletes a block condition by id and, on success, un-publishes it from the traffic manager.
     */
    @Override
    public boolean deleteBlockCondition(int conditionId) throws APIManagementException {
        BlockConditionsDTO blockCondition = apiMgtDAO.getBlockCondition(conditionId);
        boolean deleteState = apiMgtDAO.deleteBlockCondition(conditionId);
        if (deleteState && blockCondition != null) {
            unpublishBlockCondition(blockCondition);
        }
        return deleteState;
    }

    /**
     * Deletes a block condition by UUID and, on success, un-publishes it from the traffic manager.
     */
    @Override
    public boolean deleteBlockConditionByUUID(String uuid) throws APIManagementException {
        boolean deleteState = false;
        BlockConditionsDTO blockCondition = apiMgtDAO.getBlockConditionByUUID(uuid);
        if (blockCondition != null) {
            deleteState = apiMgtDAO.deleteBlockCondition(blockCondition.getConditionId());
            if (deleteState) {
                unpublishBlockCondition(blockCondition);
            }
        }
        return deleteState;
    }

    /**
     * Unpublish a blocking condition.
     *
     * @param blockCondition Block Condition object
     */
    private void unpublishBlockCondition(BlockConditionsDTO blockCondition) {
        String blockingConditionType = blockCondition.getConditionType();
        String blockingConditionValue = blockCondition.getConditionValue();
        // USER conditions are re-normalized the same way addBlockCondition stored them.
        if (APIConstants.BLOCKING_CONDITIONS_USER.equalsIgnoreCase(blockingConditionType)) {
            blockingConditionValue = MultitenantUtils.getTenantAwareUsername(blockingConditionValue);
            blockingConditionValue = blockingConditionValue + "@" + tenantDomain;
            blockCondition.setConditionValue(blockingConditionValue);
        }
        publishBlockingEvent(blockCondition, "delete");
    }

    /** Loads an advanced (API-level) throttle policy for the tenant resolved from the username. */
    @Override
    public APIPolicy getAPIPolicy(String username, String policyName) throws APIManagementException {
        return apiMgtDAO.getAPIPolicy(policyName, APIUtil.getTenantId(username));
    }

    /** Loads an advanced policy by UUID; raises a not-found error when it does not exist. */
    @Override
    public APIPolicy getAPIPolicyByUUID(String uuid) throws APIManagementException {
        APIPolicy policy = apiMgtDAO.getAPIPolicyByUUID(uuid);
        if (policy == null) {
            handlePolicyNotFoundException("Advanced Policy: " + uuid + " was not found.");
        }
        return policy;
    }

    /** Loads an application-level throttle policy for the tenant resolved from the username. */
    @Override
    public ApplicationPolicy getApplicationPolicy(String username, String policyName) throws APIManagementException {
        return apiMgtDAO.getApplicationPolicy(policyName, APIUtil.getTenantId(username));
    }

    /** Loads an application policy by UUID; raises a not-found error when it does not exist. */
    @Override
    public ApplicationPolicy getApplicationPolicyByUUID(String uuid) throws APIManagementException {
        ApplicationPolicy policy = apiMgtDAO.getApplicationPolicyByUUID(uuid);
        if (policy == null) {
            handlePolicyNotFoundException("Application Policy: " + uuid + " was not found.");
        }
        return policy;
    }

    /** Loads a subscription-level throttle policy for the tenant resolved from the username. */
    @Override
    public SubscriptionPolicy getSubscriptionPolicy(String username, String policyName) throws APIManagementException {
        return apiMgtDAO.getSubscriptionPolicy(policyName, APIUtil.getTenantId(username));
    }

    /** Loads a subscription policy by UUID; raises a not-found error when it does not exist. */
    @Override
    public SubscriptionPolicy getSubscriptionPolicyByUUID(String uuid) throws APIManagementException {
        SubscriptionPolicy policy = apiMgtDAO.getSubscriptionPolicyByUUID(uuid);
        if (policy == null) {
            handlePolicyNotFoundException("Subscription Policy: " + uuid + " was not found.");
        }
        return policy;
    }

    /** Loads a global throttle policy by name (global policies are not tenant-scoped here). */
    @Override
    public GlobalPolicy getGlobalPolicy(String policyName) throws APIManagementException {
        return apiMgtDAO.getGlobalPolicy(policyName);
    }

    /** Loads a global policy by UUID; raises a not-found error when it does not exist. */
    @Override
    public GlobalPolicy getGlobalPolicyByUUID(String uuid) throws APIManagementException {
        GlobalPolicy policy = apiMgtDAO.getGlobalPolicyByUUID(uuid);
        if (policy == null) {
            handlePolicyNotFoundException("Global Policy: " + uuid + " was not found.");
        }
        return policy;
    }

    /**
     * Publishes the changes on blocking conditions.
     *
     * @param blockCondition Block Condition object
     * @throws APIManagementException
     */
    private void publishBlockingEventUpdate(BlockConditionsDTO blockCondition) throws APIManagementException {
        if (blockCondition != null) {
            String blockingConditionType = blockCondition.getConditionType();
            String blockingConditionValue = blockCondition.getConditionValue();
            // USER conditions are normalized to "tenant-aware-username@tenantDomain" before publishing.
            if (APIConstants.BLOCKING_CONDITIONS_USER.equalsIgnoreCase(blockingConditionType)) {
                blockingConditionValue = MultitenantUtils.getTenantAwareUsername(blockingConditionValue);
                blockingConditionValue = blockingConditionValue + "@" + tenantDomain;
                blockCondition.setConditionValue(blockingConditionValue);
            }
            publishBlockingEvent(blockCondition, Boolean.toString(blockCondition.isEnabled()));
        }
    }

    /**
     * Publishes the changes on blocking conditions.
     *
     * @param blockConditionsDTO Blockcondition Dto event
     * @param state              state string carried in the event payload (e.g. "true"/"false"/"delete")
     */
    private void publishBlockingEvent(BlockConditionsDTO blockConditionsDTO, String state) {
        String conditionType = blockConditionsDTO.getConditionType();
        String conditionValue = blockConditionsDTO.getConditionValue();
        // IP / IP-range values are Java-escaped so they survive transport as a string payload.
        if (APIConstants.BLOCKING_CONDITIONS_IP.equals(conditionType) ||
                APIConstants.BLOCK_CONDITION_IP_RANGE.equals(conditionType)) {
            conditionValue = StringEscapeUtils.escapeJava(conditionValue);
        }
        Object[] objects = new Object[]{blockConditionsDTO.getConditionId(), blockConditionsDTO.getConditionType(),
                conditionValue, state, tenantDomain};
        Event blockingMessage = new Event(APIConstants.BLOCKING_CONDITIONS_STREAM_ID, System.currentTimeMillis(),
                null, null, objects);
        ThrottleProperties throttleProperties = getAPIManagerConfiguration().getThrottleProperties();
        // Only publish when the throttle data publisher is configured and enabled.
        if (throttleProperties.getDataPublisher() != null && throttleProperties.getDataPublisher().isEnabled()) {
            APIUtil.publishEventToTrafficManager(Collections.EMPTY_MAP, blockingMessage);
        }
    }

    /**
     * Publishes an add/remove notification for a global policy's key template to the traffic manager.
     *
     * @param templateValue the key template string
     * @param state         "add" or "remove" (this file only calls it with "remove")
     */
    private void publishKeyTemplateEvent(String templateValue, String state) {
        Object[] objects = new Object[]{templateValue,state};
        Event keyTemplateMessage = new Event(APIConstants.KEY_TEMPLATE_STREM_ID, System.currentTimeMillis(),
                null, null, objects);
        ThrottleProperties throttleProperties = getAPIManagerConfiguration().getThrottleProperties();
        if (throttleProperties.getDataPublisher() != null && throttleProperties.getDataPublisher().isEnabled()) {
            APIUtil.publishEventToTrafficManager(Collections.EMPTY_MAP, keyTemplateMessage);
        }
    }

    /**
     * Returns the lifecycle configuration XML for the given tenant, running inside that tenant's
     * Carbon context when the tenant is not the super tenant. Returns null on parse/registry errors
     * (handleException is expected to throw, but the compiler-required returns remain).
     */
    public String getLifecycleConfiguration(String tenantDomain) throws APIManagementException {
        boolean isTenantFlowStarted = false;
        try {
            if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
                isTenantFlowStarted = true;
                PrivilegedCarbonContext.startTenantFlow();
                PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
            }
            return APIUtil.getFullLifeCycleData(configRegistry);
        } catch (XMLStreamException e) {
            handleException("Parsing error while getting the lifecycle configuration content.", e);
            return null;
        } catch (RegistryException e) {
            handleException("Registry error while getting the lifecycle configuration content.", e);
            return null;
        } finally {
            // Always unwind the tenant flow we started above.
            if (isTenantFlowStarted) {
                PrivilegedCarbonContext.endTenantFlow();
            }
        }
    }

    /** Returns the external workflow reference recorded for the given subscription id. */
    public String getExternalWorkflowReferenceId(int subscriptionId) throws APIManagementException {
        return apiMgtDAO.getExternalWorkflowReferenceForSubscription(subscriptionId);
    }

    /**
     * Adds an endpoint certificate for the caller's tenant and broadcasts a certificate-add event.
     *
     * @return the numeric response code from the certificate manager (500 on tenant-lookup failure)
     */
    @Override
    public int addCertificate(String userName, String certificate, String alias, String endpoint)
            throws APIManagementException {
        ResponseCode responseCode = ResponseCode.INTERNAL_SERVER_ERROR;
        String tenantDomain = MultitenantUtils.getTenantDomain(userName);
        try {
            int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                    .getTenantId(tenantDomain);
            responseCode = certificateManager
                    .addCertificateToParentNode(certificate, alias, endpoint, tenantId);
            CertificateEvent certificateEvent = new CertificateEvent(UUID.randomUUID().toString(),
                    System.currentTimeMillis(),APIConstants.EventType.ENDPOINT_CERTIFICATE_ADD.toString(),
                    tenantDomain,alias,endpoint);
            APIUtil.sendNotification(certificateEvent, APIConstants.NotifierType.CERTIFICATE.name());
        } catch (UserStoreException e) {
            handleException("Error while reading tenant information", e);
        }
        return responseCode.getResponseCode();
    }

    /**
     * Adds a client (mutual-SSL) certificate for the given API.
     *
     * @return the numeric response code from the certificate manager (500 on tenant-lookup failure)
     */
    @Override
    public int addClientCertificate(String userName, APIIdentifier apiIdentifier, String certificate, String alias,
                                    String tierName, String organization) throws APIManagementException {
        ResponseCode responseCode = ResponseCode.INTERNAL_SERVER_ERROR;
        String tenantDomain = MultitenantUtils.getTenantDomain(userName);
        try {
            int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                    .getTenantId(tenantDomain);
            responseCode = certificateManager
                    .addClientCertificate(apiIdentifier, certificate, alias, tierName, tenantId, organization);
        } catch (UserStoreException e) {
            handleException("Error while reading tenant information, client certificate addition failed for the API "
                    + apiIdentifier.toString(), e);
        }
        return responseCode.getResponseCode();
    }

    /**
     * Deletes an endpoint certificate for the caller's tenant and broadcasts a certificate-remove event.
     */
    @Override
    public int deleteCertificate(String userName, String alias, String endpoint) throws APIManagementException {
        ResponseCode responseCode = ResponseCode.INTERNAL_SERVER_ERROR;
        String tenantDomain = MultitenantUtils.getTenantDomain(userName);
        try {
            int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                    .getTenantId(tenantDomain);
            responseCode = certificateManager.deleteCertificateFromParentNode(alias, endpoint, tenantId);
            CertificateEvent certificateEvent = new CertificateEvent(UUID.randomUUID().toString(),
                    System.currentTimeMillis(), APIConstants.EventType.ENDPOINT_CERTIFICATE_REMOVE.toString(),
                    tenantDomain, alias, endpoint);
            APIUtil.sendNotification(certificateEvent, APIConstants.NotifierType.CERTIFICATE.name());
        } catch (UserStoreException e) {
            handleException("Error while reading tenant information", e);
        }
        return responseCode.getResponseCode();
    }

    /** Deletes a client certificate registered against the given API. */
    @Override
    public int deleteClientCertificate(String userName, APIIdentifier apiIdentifier, String alias)
            throws APIManagementException {
        ResponseCode responseCode = ResponseCode.INTERNAL_SERVER_ERROR;
        String tenantDomain = MultitenantUtils.getTenantDomain(userName);
        try {
            int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                    .getTenantId(tenantDomain);
            responseCode = certificateManager.deleteClientCertificateFromParentNode(apiIdentifier, alias, tenantId);
        } catch (UserStoreException e) {
            handleException(
                    "Error while reading tenant information while trying to delete client certificate with alias "
                            + alias + " for the API " + apiIdentifier.toString(), e);
        }
        return responseCode.getResponseCode();
    }

    /** Delegates to the certificate manager's configuration check. */
    @Override
    public boolean isConfigured() {
        return certificateManager.isConfigured();
    }

    /**
     * Lists certificates for the current tenant.
     * NOTE(review): the tenant is resolved from the instance's {@code tenantDomain} field — the
     * {@code userName} parameter is unused here; confirm this is intentional.
     */
    @Override
    public List<CertificateMetadataDTO> getCertificates(String userName) throws APIManagementException {
        int tenantId = 0;
        try {
            tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                    .getTenantId(tenantDomain);
        } catch (UserStoreException e) {
            handleException("Error while reading tenant information", e);
        }
        return certificateManager.getCertificates(tenantId);
    }

    /** Searches endpoint certificates by alias and/or endpoint within a tenant. */
    @Override
    public List<CertificateMetadataDTO> searchCertificates(int tenantId, String alias, String endpoint)
            throws APIManagementException {
        return certificateManager.getCertificates(tenantId, alias, endpoint);
    }

    /** Searches client certificates by alias/API within a tenant and organization. */
    @Override
    public List<ClientCertificateDTO> searchClientCertificates(int tenantId, String alias,
            APIIdentifier apiIdentifier, String organization) throws APIManagementException {
        return certificateManager.searchClientCertificates(tenantId, alias, apiIdentifier, organization);
    }

    /**
     * API-product overload: converts the product identifier into an API identifier and reuses the
     * API search above.
     */
    @Override
    public List<ClientCertificateDTO> searchClientCertificates(int tenantId, String alias,
            APIProductIdentifier apiProductIdentifier, String organization) throws APIManagementException {
        APIIdentifier apiIdentifier = new APIIdentifier(apiProductIdentifier.getProviderName(),
                apiProductIdentifier.getName(), apiProductIdentifier.getVersion());
        return certificateManager.searchClientCertificates(tenantId, alias, apiIdentifier, organization);
    }

    /** True when a certificate with the given alias exists for the tenant. */
    @Override
    public boolean isCertificatePresent(int tenantId, String alias) throws APIManagementException {
        return certificateManager.isCertificatePresent(tenantId, alias);
    }

    /** Returns the first client certificate matching the alias (API-agnostic), or null. */
    @Override
    public ClientCertificateDTO getClientCertificate(int tenantId, String alias, String organization)
            throws APIManagementException {
        List<ClientCertificateDTO> clientCertificateDTOS = certificateManager
                .searchClientCertificates(tenantId, alias, null, organization);
        if (clientCertificateDTOS != null && clientCertificateDTOS.size() > 0) {
            return clientCertificateDTOS.get(0);
        }
        return null;
    }

    /** Returns the first client certificate matching the alias for a specific API, or null. */
    @Override
    public ClientCertificateDTO getClientCertificate(int tenantId, String alias, APIIdentifier apiIdentifier,
            String organization) throws APIManagementException {
        List<ClientCertificateDTO> clientCertificateDTOS = certificateManager
                .searchClientCertificates(tenantId, alias, apiIdentifier, organization);
        if (clientCertificateDTOS != null && clientCertificateDTOS.size() > 0) {
            return clientCertificateDTOS.get(0);
        }
        return null;
    }

    /** Returns detailed information (validity etc.) for the certificate with the given alias. */
    @Override
    public CertificateInformationDTO getCertificateStatus(String alias) throws APIManagementException {
        return certificateManager.getCertificateInformation(alias);
    }

    /**
     * Replaces the certificate stored under the given alias; on success a certificate-update
     * event is broadcast. Returns 500 when the manager yields no response code.
     */
    @Override
    public int updateCertificate(String certificateString, String alias) throws APIManagementException {
        ResponseCode responseCode = certificateManager.updateCertificate(certificateString, alias);
        if (responseCode != null && responseCode.getResponseCode() == ResponseCode.SUCCESS.getResponseCode()) {
            CertificateEvent certificateEvent = new CertificateEvent(UUID.randomUUID().toString(),
                    System.currentTimeMillis(), APIConstants.EventType.ENDPOINT_CERTIFICATE_UPDATE.toString(),
                    tenantDomain, alias);
            APIUtil.sendNotification(certificateEvent, APIConstants.NotifierType.CERTIFICATE.name());
        }
        return responseCode != null ? responseCode.getResponseCode() :
                ResponseCode.INTERNAL_SERVER_ERROR.getResponseCode();
    }

    /** Replaces a client certificate; returns 500 when the manager yields no response code. */
    @Override
    public int updateClientCertificate(String certificate, String alias, APIIdentifier apiIdentifier,
            String tier, int tenantId, String organization) throws APIManagementException {
        ResponseCode responseCode = certificateManager
                .updateClientCertificate(certificate, alias, tier, tenantId, organization);
        return responseCode != null ? responseCode.getResponseCode() :
                ResponseCode.INTERNAL_SERVER_ERROR.getResponseCode();
    }

    /** Number of endpoint certificates stored for the tenant. */
    @Override
    public int getCertificateCountPerTenant(int tenantId) throws APIManagementException {
        return certificateManager.getCertificateCount(tenantId);
    }

    /** Number of client certificates stored for the tenant. */
    @Override
    public int getClientCertificateCount(int tenantId) throws APIManagementException {
        return certificateManager.getClientCertificateCount(tenantId);
    }

    /** Raw certificate content for the given alias, as a stream. */
    @Override
    public ByteArrayInputStream getCertificateContent(String alias) throws APIManagementException {
        return certificateManager.getCertificateContent(alias);
    }

    /**
     * Get the workflow status information for the given api for the given workflow type
     *
     * @param uuid Api uuid
     * @param workflowType workflow type
     * @return WorkflowDTO
     * @throws APIManagementException
     */
    public WorkflowDTO getAPIWorkflowStatus(String uuid, String workflowType)
            throws APIManagementException {
        return APIUtil.getAPIWorkflowStatus(uuid, workflowType);
    }

    /**
     * Deletes any pending API-state-change workflow task for the API identified by uuid.
     * Failures are reported through handleException for both exception types.
     */
    @Override
    public void deleteWorkflowTask(String uuid) throws APIManagementException {
        int apiId;
        try {
            apiId = apiMgtDAO.getAPIID(uuid);
            cleanUpPendingAPIStateChangeTask(apiId);
        } catch (APIManagementException e) {
            handleException("Error while deleting the workflow task.", e);
        } catch (WorkflowException e) {
            handleException("Error while deleting the workflow task.", e);
        }
    }

    /**
     * Cleans up a CREATED (still pending) API state-change workflow task for the given API id,
     * if one exists.
     */
    private void cleanUpPendingAPIStateChangeTask(int apiId) throws WorkflowException, APIManagementException {
        //Run cleanup task for workflow
        WorkflowExecutor apiStateChangeWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_API_STATE);
        WorkflowDTO wfDTO = apiMgtDAO.retrieveWorkflowFromInternalReference(Integer.toString(apiId),
                WorkflowConstants.WF_TYPE_AM_API_STATE);
        if (wfDTO != null && WorkflowStatus.CREATED == wfDTO.getStatus()) {
            apiStateChangeWFExecutor.cleanUpPendingTask(wfDTO.getExternalWorkflowReference());
        }
    }

    /**
     * Clean-up pending subscriptions of a given API
     *
     * @param uuid API uuid
     * @throws APIManagementException
     */
    private void cleanUpPendingSubscriptionCreationProcessesByAPI(String uuid) throws APIManagementException {

        WorkflowExecutor createSubscriptionWFExecutor = getWorkflowExecutor(
                WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
        Set<Integer> pendingSubscriptions = apiMgtDAO.getPendingSubscriptionsByAPIId(uuid);
        String workflowExtRef = null;

        for (int subscription : pendingSubscriptions) {
            try {
                workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceForSubscription(subscription);
                createSubscriptionWFExecutor.cleanUpPendingTask(workflowExtRef);
            } catch (APIManagementException ex) {
                // failed clean-up processes are ignored to prevent failures in API state change flow
                log.warn("Failed to retrieve external workflow reference for subscription for subscription ID: "
                        + subscription);
            } catch (WorkflowException ex) {
                // failed clean-up processes are ignored to prevent failures in API state change flow
                log.warn("Failed to clean-up pending subscription approval task for subscription ID: "
                        + subscription);
            }
        }
    }

    /**
     * Returns the given workflow executor
     *
     * @param workflowType Workflow executor type
     * @return WorkflowExecutor of given type, or null when lookup fails and handleException returns
     * @throws APIManagementException if an error occurred while getting WorkflowExecutor
     */
    protected WorkflowExecutor getWorkflowExecutor(String workflowType) throws APIManagementException {
        try {
            return WorkflowExecutorFactory.getInstance().getWorkflowExecutor(workflowType);
        } catch (WorkflowException e) {
            // NOTE(review): the original cause `e` is dropped here (single-arg handleException);
            // consider passing the exception through — confirm handleException overloads.
            handleException("Error while obtaining WorkflowExecutor instance for workflow type :" + workflowType);
        }
        return null;
    }

    /**
     * Undeploys an API product from all gateway environments that are being removed and are not
     * simultaneously being (re-)added.
     */
    protected void removeFromGateway(APIProduct apiProduct, String tenantDomain,
            Set<APIRevisionDeployment> gatewaysToRemove, Set<String> gatewaysToAdd)
            throws APIManagementException {
        APIGatewayManager gatewayManager = APIGatewayManager.getInstance();
        Set<API> associatedAPIs = getAssociatedAPIs(apiProduct);
        Set<String> environmentsToRemove = new HashSet<>();
        for (APIRevisionDeployment apiRevisionDeployment : gatewaysToRemove) {
            environmentsToRemove.add(apiRevisionDeployment.getDeployment());
        }
        // Environments that are re-added in the same operation must not be undeployed.
        environmentsToRemove.removeAll(gatewaysToAdd);
        gatewayManager.unDeployFromGateway(apiProduct, tenantDomain, associatedAPIs, environmentsToRemove);
    }

    /** Resolves a tenant domain to its numeric tenant id via the Carbon realm service. */
    protected int getTenantId(String tenantDomain) throws UserStoreException {
        return ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getTenantId(tenantDomain);
    }

    /** Dispatches the given notification asynchronously. (Name typo "Asnc" kept — public API.) */
    protected void sendAsncNotification(NotificationDTO notificationDTO) throws NotificationException {
        new NotificationExecutor().sendAsyncNotifications(notificationDTO);
    }

    /** Invalidates the gateway resource cache entries for the given API context/version. */
    protected void invalidateResourceCache(String apiContext, String apiVersion,Set<URITemplate> uriTemplates) {
        APIAuthenticationAdminClient client = new APIAuthenticationAdminClient();
        client.invalidateResourceCache(apiContext, apiVersion, uriTemplates);
    }

    /**
     * To add API/Product roles restrictions and add additional properties.
     *
     * @param artifactPath                Path of the API/Product artifact.
     * @param publisherAccessControlRoles Role specified for the publisher access control.
     * @param publisherAccessControl     Publisher Access Control restriction.
     * @param additionalProperties        Additional properties that is related with an API/Product.
     * @throws RegistryException Registry Exception.
*/ private void updateRegistryResources(String artifactPath, String publisherAccessControlRoles, String publisherAccessControl, Map<String, String> additionalProperties) throws RegistryException { publisherAccessControlRoles = (publisherAccessControlRoles == null || publisherAccessControlRoles.trim() .isEmpty()) ? APIConstants.NULL_USER_ROLE_LIST : publisherAccessControlRoles; if (publisherAccessControlRoles.equalsIgnoreCase(APIConstants.NULL_USER_ROLE_LIST)) { publisherAccessControl = APIConstants.NO_ACCESS_CONTROL; } if (!registry.resourceExists(artifactPath)) { return; } Resource apiResource = registry.get(artifactPath); if (apiResource != null) { if (additionalProperties != null) { // Removing all the properties, before updating new properties. Properties properties = apiResource.getProperties(); if (properties != null) { Enumeration propertyNames = properties.propertyNames(); while (propertyNames.hasMoreElements()) { String propertyName = (String) propertyNames.nextElement(); if (propertyName.startsWith(APIConstants.API_RELATED_CUSTOM_PROPERTIES_PREFIX)) { apiResource.removeProperty(propertyName); } } } } // We are changing to lowercase, as registry search only supports lower-case characters. apiResource.setProperty(APIConstants.PUBLISHER_ROLES, publisherAccessControlRoles.toLowerCase()); // This property will be only used for display proposes in the Publisher UI so that the original case of // the roles that were specified can be maintained. 
apiResource.setProperty(APIConstants.DISPLAY_PUBLISHER_ROLES, publisherAccessControlRoles); apiResource.setProperty(APIConstants.ACCESS_CONTROL, publisherAccessControl); apiResource.removeProperty(APIConstants.CUSTOM_API_INDEXER_PROPERTY); if (additionalProperties != null && additionalProperties.size() != 0) { for (Map.Entry<String, String> entry : additionalProperties.entrySet()) { apiResource.setProperty( (APIConstants.API_RELATED_CUSTOM_PROPERTIES_PREFIX + entry.getKey()), entry.getValue()); } } registry.put(artifactPath, apiResource); } } /** * To get the query to retrieve user role list query based on current role list. * * @return the query with user role list. * @throws APIManagementException API Management Exception. */ private String getUserRoleListQuery() throws APIManagementException { StringBuilder rolesQuery = new StringBuilder(); rolesQuery.append('('); rolesQuery.append(APIConstants.NULL_USER_ROLE_LIST); String[] userRoles = APIUtil.getListOfRoles(userNameWithoutChange); String skipRolesByRegex = APIUtil.getSkipRolesByRegex(); if (StringUtils.isNotEmpty(skipRolesByRegex)) { List<String> filteredUserRoles = new ArrayList<>(Arrays.asList(userRoles)); String[] regexList = skipRolesByRegex.split(","); for (int i = 0; i < regexList.length; i++) { Pattern p = Pattern.compile(regexList[i]); Iterator<String> itr = filteredUserRoles.iterator(); while(itr.hasNext()) { String role = itr.next(); Matcher m = p.matcher(role); if (m.matches()) { itr.remove(); } } } userRoles = filteredUserRoles.toArray(new String[0]); } if (userRoles != null) { for (String userRole : userRoles) { rolesQuery.append(" OR "); rolesQuery.append(ClientUtils.escapeQueryChars(APIUtil.sanitizeUserRole(userRole.toLowerCase()))); } } rolesQuery.append(")"); if(log.isDebugEnabled()) { log.debug("User role list solr query " + APIConstants.PUBLISHER_ROLES + "=" + rolesQuery.toString()); } return APIConstants.PUBLISHER_ROLES + "=" + rolesQuery.toString(); } @Override protected String 
getSearchQuery(String searchQuery) throws APIManagementException { if (!isAccessControlRestrictionEnabled || APIUtil.hasPermission(userNameWithoutChange, APIConstants.Permissions .APIM_ADMIN)) { return searchQuery; } String criteria = getUserRoleListQuery(); if (searchQuery != null && !searchQuery.trim().isEmpty()) { criteria = criteria + "&" + searchQuery; } return criteria; } /** * Method to get the user specified mediation sequence. * * @param apiIdentifier : The identifier of the api. * @param type : Mediation type. {in, out, fault} * @param name : The name of the sequence that needed. * @return : The content of the mediation sequence. */ public String getSequenceFileContent(APIIdentifier apiIdentifier, String type, String name) throws APIManagementException { Resource requiredSequence; InputStream sequenceStream; String sequenceText = ""; try { if (apiIdentifier != null && type != null && name != null) { if (log.isDebugEnabled()) { log.debug("Check the default " + type + "sequences for " + name); } requiredSequence = getDefaultSequence(type, name); if (requiredSequence == null) { if (log.isDebugEnabled()) { log.debug("Check the custom " + type +" sequences for " + name); } requiredSequence = getCustomSequence(apiIdentifier, type, name); } //Convert the content stream to a string. 
if (requiredSequence != null) { sequenceStream = requiredSequence.getContentStream(); StringWriter stringWriter = new StringWriter(); IOUtils.copy(sequenceStream, stringWriter); sequenceText = stringWriter.toString(); } else { log.error("No sequence for the name " + name + "is found!"); } } else { log.error("Invalid arguments."); } } catch (APIManagementException e) { log.error(e.getMessage()); throw new APIManagementException(e); } catch (RegistryException e) { log.error(e.getMessage()); throw new APIManagementException(e); } catch (IOException e) { log.error(e.getMessage()); throw new APIManagementException(e); } return sequenceText; } /** * Get the mediation sequence which matches the given type and name from the custom sequences. * * @param type : The sequence type. * @param name : The name of the sequence. * @return : The mediation sequence which matches the given parameters. Returns null if no matching sequence is * found. */ private Resource getDefaultSequence(String type, String name) throws APIManagementException { String defaultSequenceFileLocation = ""; try { UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); if (APIConstants.FAULT_SEQUENCE.equals(type)) { defaultSequenceFileLocation = APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION; } else if (APIConstants.OUT_SEQUENCE.equals(type)) { defaultSequenceFileLocation = APIConstants.API_CUSTOM_OUTSEQUENCE_LOCATION; } else { defaultSequenceFileLocation = APIConstants.API_CUSTOM_INSEQUENCE_LOCATION; } if (registry.resourceExists(defaultSequenceFileLocation)) { org.wso2.carbon.registry.api.Collection defaultSeqCollection = (org.wso2.carbon.registry.api.Collection) registry.get(defaultSequenceFileLocation); if (defaultSeqCollection != null) { String[] faultSeqChildPaths = defaultSeqCollection.getChildren(); for (String defaultSeqChildPath : faultSeqChildPaths) { Resource defaultSequence = registry.get(defaultSeqChildPath); OMElement seqElement 
= APIUtil.buildOMElement(defaultSequence.getContentStream()); if (name.equals(seqElement.getAttributeValue(new QName("name")))) { return defaultSequence; } } } } } catch (RegistryException e) { throw new APIManagementException("Error while retrieving registry for tenant " + tenantId, e); } catch (org.wso2.carbon.registry.api.RegistryException e) { throw new APIManagementException("Error while processing the " + defaultSequenceFileLocation + " in the registry", e); } catch (Exception e) { throw new APIManagementException("Error while building the OMElement from the sequence " + name, e); } return null; } /** * Get the resource which matches the user selected resource type and the name from the custom uploaded sequences. * * @param identifier : The API Identifier. * @param type : The sequence type. * @return : Resource object which matches the parameters. If no resource found, return null. */ private Resource getCustomSequence(APIIdentifier identifier, String type, String name) throws APIManagementException { Resource customSequence = null; boolean isTenantFlowStarted = false; try { String tenantDomain = null; if (identifier.getProviderName().contains("-AT-")) { String provider = identifier.getProviderName().replace("-AT-", "@"); tenantDomain = MultitenantUtils.getTenantDomain(provider); } if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; } if (!StringUtils.isEmpty(tenantDomain)) { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); String customSeqFileLocation = ""; if (APIConstants.FAULT_SEQUENCE.equals(type)) { customSeqFileLocation = APIUtil.getSequencePath(identifier, 
APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT);
            } else if (APIConstants.OUT_SEQUENCE.equals(type)) {
                customSeqFileLocation = APIUtil.getSequencePath(identifier,
                        APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT);
            } else {
                // Any other type value falls back to the "in" sequence location.
                customSeqFileLocation = APIUtil.getSequencePath(identifier,
                        APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN);
            }
            if (registry.resourceExists(customSeqFileLocation)) {
                org.wso2.carbon.registry.api.Collection customSeqCollection =
                        (org.wso2.carbon.registry.api.Collection) registry.get(customSeqFileLocation);
                if (customSeqCollection != null) {
                    String[] faultSeqChildPaths = customSeqCollection.getChildren();
                    // Scan every uploaded sequence; return the first whose "name" attribute matches.
                    for (String customSeqChildPath : faultSeqChildPaths) {
                        customSequence = registry.get(customSeqChildPath);
                        OMElement seqElement = APIUtil.buildOMElement(customSequence.getContentStream());
                        if (name.equals(seqElement.getAttributeValue(new QName("name")))) {
                            return customSequence;
                        }
                    }
                }
            }
        } catch (RegistryException e) {
            throw new APIManagementException("Error while retrieving registry for tenant " + tenantId, e);
        } catch (org.wso2.carbon.registry.api.RegistryException e) {
            throw new APIManagementException("Error while processing the " + type + " sequences of " + identifier
                    + " in the registry", e);
        } catch (Exception e) {
            throw new APIManagementException("Error while building the OMElement from the sequence " + name, e);
        } finally {
            if (isTenantFlowStarted) {
                PrivilegedCarbonContext.endTenantFlow();
            }
        }
        // No sequence of the requested type carries the requested name.
        return null;
    }

    /**
     * To check authorization of the API against current logged in user. If the user is not authorized an exception
     * will be thrown.
     *
     * @param identifier API identifier
     * @throws APIManagementException APIManagementException
     */
    protected void checkAccessControlPermission(Identifier identifier) throws APIManagementException {
        // Nothing to check when there is no identifier or access control is globally disabled.
        if (identifier == null || !isAccessControlRestrictionEnabled) {
            if (!isAccessControlRestrictionEnabled && log.isDebugEnabled()) {
                log.debug("Publisher access control restriction is not enabled. Hence the API " + identifier
                        + " can be editable and viewable by all the API publishers and creators.");
            }
            return;
        }
        String resourcePath = StringUtils.EMPTY;
        String identifierType = StringUtils.EMPTY;
        if (identifier instanceof APIIdentifier) {
            resourcePath = APIUtil.getAPIPath((APIIdentifier) identifier);
            identifierType = APIConstants.API_IDENTIFIER_TYPE;
        } else if (identifier instanceof APIProductIdentifier) {
            resourcePath = APIUtil.getAPIProductPath((APIProductIdentifier) identifier);
            identifierType = APIConstants.API_PRODUCT_IDENTIFIER_TYPE;
        }
        try {
            Registry sysRegistry = getRegistryService().getGovernanceSystemRegistry();
            // Need user name with tenant domain to get correct domain name from
            // MultitenantUtils.getTenantDomain(username)
            String userNameWithTenantDomain = (userNameWithoutChange != null) ? userNameWithoutChange : username;
            if (!sysRegistry.resourceExists(resourcePath)) {
                if (log.isDebugEnabled()) {
                    log.debug("Resource does not exist in the path : " + resourcePath + " this can happen if this is in the "
                            + "middle of the new " + identifierType + " creation, hence not checking the access control");
                }
                return;
            }
            Resource resource = sysRegistry.get(resourcePath);
            if (resource == null) {
                return;
            }
            String accessControlProperty = resource.getProperty(APIConstants.ACCESS_CONTROL);
            // Absent/blank/"no access control" property means the resource is unrestricted.
            if (accessControlProperty == null || accessControlProperty.trim().isEmpty() || accessControlProperty
                    .equalsIgnoreCase(APIConstants.NO_ACCESS_CONTROL)) {
                if (log.isDebugEnabled()) {
                    log.debug(identifierType + " in the path " + resourcePath
                            + " does not have any access control restriction");
                }
                return;
            }
            // APIM admins bypass publisher access control entirely.
            if (APIUtil.hasPermission(userNameWithTenantDomain, APIConstants.Permissions.APIM_ADMIN)) {
                return;
            }
            String publisherAccessControlRoles = resource.getProperty(APIConstants.DISPLAY_PUBLISHER_ROLES);
            if (publisherAccessControlRoles != null && !publisherAccessControlRoles.trim().isEmpty()) {
                String[] accessControlRoleList = publisherAccessControlRoles.replaceAll("\\s+", "").split(",");
if (log.isDebugEnabled()) {
                    log.debug(identifierType + " has restricted access to creators and publishers with the roles : " + Arrays
                            .toString(accessControlRoleList));
                }
                String[] userRoleList = APIUtil.getListOfRoles(userNameWithTenantDomain);
                if (log.isDebugEnabled()) {
                    log.debug("User " + username + " has roles " + Arrays.toString(userRoleList));
                }
                // Grant access as soon as any of the user's roles matches an allowed role.
                for (String role : accessControlRoleList) {
                    if (!role.equalsIgnoreCase(APIConstants.NULL_USER_ROLE_LIST) && APIUtil
                            .compareRoleList(userRoleList, role)) {
                        return;
                    }
                }
                if (log.isDebugEnabled()) {
                    log.debug(identifierType + " " + identifier + " cannot be accessed by user '" + username + "'. It "
                            + "has a publisher access control restriction");
                }
                throw new APIManagementException(
                        APIConstants.UN_AUTHORIZED_ERROR_MESSAGE + " view or modify the " + identifierType + " "
                                + identifier);
            }
        } catch (RegistryException e) {
            throw new APIManagementException(
                    "Registry Exception while trying to check the access control restriction of " + identifierType + " "
                            + identifier.getName(), e);
        }
    }

    @Override
    public Map<API, List<APIProductResource>> addAPIProductWithoutPublishingToGateway(APIProduct product)
            throws APIManagementException {
        Map<API, List<APIProductResource>> apiToProductResourceMapping = new HashMap<>();
        validateApiProductInfo(product);
        String tenantDomain = MultitenantUtils
                .getTenantDomain(APIUtil.replaceEmailDomainBack(product.getId().getProviderName()));
        if (log.isDebugEnabled()) {
            log.debug("API Product details successfully added to the registry. API Product Name: "
                    + product.getId().getName() + ", API Product Version : " + product.getId().getVersion()
                    + ", API Product context : " + "change"); //todo: log context
        }
        List<APIProductResource> resources = product.getProductResources();
        // List to hold resources which are actually in an existing api. If user has created an API product with invalid
        // API or invalid resource of a valid API, that content will be removed; validResources will have only
        // legitimate apis.
        List<APIProductResource> validResources = new ArrayList<APIProductResource>();
        for (APIProductResource apiProductResource : resources) {
            API api;
            String apiUUID;
            if (apiProductResource.getProductIdentifier() != null) {
                // Resolve the API through its identifier (provider e-mail is stored with the domain replaced).
                APIIdentifier productAPIIdentifier = apiProductResource.getApiIdentifier();
                String emailReplacedAPIProviderName = APIUtil.replaceEmailDomain(productAPIIdentifier.getProviderName());
                APIIdentifier emailReplacedAPIIdentifier = new APIIdentifier(emailReplacedAPIProviderName,
                        productAPIIdentifier.getApiName(), productAPIIdentifier.getVersion());
                apiUUID = apiMgtDAO.getUUIDFromIdentifier(emailReplacedAPIIdentifier, product.getOrganization());
                api = getAPIbyUUID(apiUUID, product.getOrganization());
            } else {
                apiUUID = apiProductResource.getApiId();
                api = getAPIbyUUID(apiUUID, product.getOrganization());
                // if API does not exist, getLightweightAPIByUUID() method throws exception.
}
            if (api != null) {
                validateApiLifeCycleForApiProducts(api);
                if (api.getSwaggerDefinition() != null) {
                    api.setSwaggerDefinition(getOpenAPIDefinition(apiUUID, product.getOrganization()));
                }
                if (!apiToProductResourceMapping.containsKey(api)) {
                    apiToProductResourceMapping.put(api, new ArrayList<>());
                }
                List<APIProductResource> apiProductResources = apiToProductResourceMapping.get(api);
                apiProductResources.add(apiProductResource);
                apiProductResource.setApiIdentifier(api.getId());
                apiProductResource.setProductIdentifier(product.getId());
                apiProductResource.setEndpointConfig(api.getEndpointConfig());
                apiProductResource.setEndpointSecurityMap(APIUtil.setEndpointSecurityForAPIProduct(api));
                URITemplate uriTemplate = apiProductResource.getUriTemplate();
                Map<String, URITemplate> templateMap = apiMgtDAO.getURITemplatesForAPI(api);
                if (uriTemplate == null) {
                    // if no resources are defined for the API, we ignore that api for the product
                } else {
                    // NOTE(review): keys on getResourceURI() here while updateAPIProduct keys on
                    // getUriTemplate() — confirm this asymmetry is intentional.
                    String key = uriTemplate.getHTTPVerb() + ":" + uriTemplate.getResourceURI();
                    if (templateMap.containsKey(key)) {
                        // Since the template ID is not set from the request, we manually set it.
                        uriTemplate.setId(templateMap.get(key).getId());
                        // request has a valid API id and a valid resource; add it to the valid resource list
                        validResources.add(apiProductResource);
                    } else {
                        // ignore the unknown resource, but leave a trace for the operator
                        log.warn("API with id " + apiProductResource.getApiId() + " does not have a resource "
                                + uriTemplate.getResourceURI() + " with http method " + uriTemplate.getHTTPVerb());
                    }
                }
            }
        }
        // set the valid resources only
        product.setProductResources(validResources);
        // now we have validated APIs and its resources inside the API product. Add it to database
        // Create registry artifact
        String apiProductUUID = createAPIProduct(product);
        product.setUuid(apiProductUUID);
        // Add to database
        apiMgtDAO.addAPIProduct(product, product.getOrganization());
        return apiToProductResourceMapping;
    }

    @Override
    public void saveToGateway(APIProduct product) throws APIManagementException {
        List<APIProductResource> productResources = product.getProductResources();
        // Only publish to gateways if the state is in Published state and has atleast one resource
        // NOTE(review): the body is empty and productResources is unused — confirm this is a deliberate stub.
    }

    public void deleteAPIProduct(APIProduct apiProduct) throws APIManagementException {
        APIProductIdentifier identifier = apiProduct.getId();
        try {
            // Deletion is refused while the product still has active subscriptions.
            long subsCount = apiMgtDAO.getAPISubscriptionCountByAPI(identifier);
            if (subsCount > 0) {
                // Logging as a WARN since this isn't an error scenario.
                String message = "Cannot remove the API Product as active subscriptions exist.";
                log.warn(message);
                throw new APIManagementException(message);
            }
            // gatewayType check is required when API Management is deployed on
            // other servers to avoid synapse
            deleteAPIProductRevisions(apiProduct.getUuid(), apiProduct.getOrganization());
            apiPersistenceInstance.deleteAPIProduct(new Organization(apiProduct.getOrganization()),
                    apiProduct.getUuid());
            apiMgtDAO.deleteAPIProduct(identifier);
            if (log.isDebugEnabled()) {
                String logMessage = "API Product Name: " + identifier.getName() + ", API Product Version "
                        + identifier.getVersion() + " successfully removed from the database.";
                log.debug(logMessage);
            }
            // Record the deletion in the audit log.
            JSONObject apiLogObject = new JSONObject();
            apiLogObject.put(APIConstants.AuditLogConstants.NAME, identifier.getName());
            apiLogObject.put(APIConstants.AuditLogConstants.VERSION, identifier.getVersion());
            apiLogObject.put(APIConstants.AuditLogConstants.PROVIDER, identifier.getProviderName());
            APIUtil.logAuditMessage(APIConstants.AuditLogConstants.API_PRODUCT, apiLogObject.toString(),
                    APIConstants.AuditLogConstants.DELETED, this.username);
GatewayArtifactsMgtDAO.getInstance().deleteGatewayArtifacts(apiProduct.getUuid());
        } catch (APIPersistenceException e) {
            handleException("Failed to remove the API product", e);
        }
    }

    /**
     * Delete an API Product, resolving its UUID from the identifier or the DAO when not supplied.
     */
    @Override
    public void deleteAPIProduct(APIProductIdentifier identifier, String apiProductUUID, String organization)
            throws APIManagementException {
        // Resolve the UUID when the caller did not supply one.
        if (StringUtils.isEmpty(apiProductUUID)) {
            if (identifier.getUUID() != null) {
                apiProductUUID = identifier.getUUID();
            } else {
                apiProductUUID = apiMgtDAO.getUUIDFromIdentifier(identifier, organization);
            }
        }
        APIProduct apiProduct = getAPIProductbyUUID(apiProductUUID, organization);
        apiProduct.setOrganization(organization);
        deleteAPIProduct(apiProduct);
    }

    @Override
    public Map<API, List<APIProductResource>> updateAPIProduct(APIProduct product)
            throws APIManagementException, FaultGatewaysException {
        Map<API, List<APIProductResource>> apiToProductResourceMapping = new HashMap<>();
        // validate resources and set api identifiers and resource ids to product
        List<APIProductResource> resources = product.getProductResources();
        for (APIProductResource apiProductResource : resources) {
            API api;
            APIProductIdentifier productIdentifier = apiProductResource.getProductIdentifier();
            String apiUUID;
            if (productIdentifier != null) {
                APIIdentifier productAPIIdentifier = apiProductResource.getApiIdentifier();
                String emailReplacedAPIProviderName = APIUtil.replaceEmailDomain(productAPIIdentifier.getProviderName());
                APIIdentifier emailReplacedAPIIdentifier = new APIIdentifier(emailReplacedAPIProviderName,
                        productAPIIdentifier.getApiName(), productAPIIdentifier.getVersion());
                apiUUID = apiMgtDAO.getUUIDFromIdentifier(emailReplacedAPIIdentifier, product.getOrganization());
                // NOTE(review): lookups here use the tenantDomain field while
                // addAPIProductWithoutPublishingToGateway uses product.getOrganization() — confirm intended.
                api = getAPIbyUUID(apiUUID, tenantDomain);
            } else {
                apiUUID = apiProductResource.getApiId();
                api = getAPIbyUUID(apiUUID, tenantDomain);
            }
            if (api.getSwaggerDefinition() != null) {
                api.setSwaggerDefinition(getOpenAPIDefinition(apiUUID, tenantDomain));
            }
            if (!apiToProductResourceMapping.containsKey(api))
{
                apiToProductResourceMapping.put(api, new ArrayList<>());
            }
            List<APIProductResource> apiProductResources = apiToProductResourceMapping.get(api);
            apiProductResources.add(apiProductResource);
            // if API does not exist, getLightweightAPIByUUID() method throws exception. so no need to handle NULL
            apiProductResource.setApiIdentifier(api.getId());
            apiProductResource.setProductIdentifier(product.getId());
            apiProductResource.setEndpointConfig(api.getEndpointConfig());
            apiProductResource.setEndpointSecurityMap(APIUtil.setEndpointSecurityForAPIProduct(api));
            URITemplate uriTemplate = apiProductResource.getUriTemplate();
            Map<String, URITemplate> templateMap = apiMgtDAO.getURITemplatesForAPI(api);
            if (uriTemplate == null) {
                // TODO handle if no resource is defined. either throw an error or add all the resources of that API
                // to the product
            } else {
                String key = uriTemplate.getHTTPVerb() + ":" + uriTemplate.getUriTemplate();
                if (templateMap.containsKey(key)) {
                    // Since the template ID is not set from the request, we manually set it.
                    uriTemplate.setId(templateMap.get(key).getId());
                } else {
                    // Unlike product creation (which logs and skips), an unknown resource is a hard error here.
                    throw new APIManagementException("API with id " + apiProductResource.getApiId()
                            + " does not have a resource " + uriTemplate.getUriTemplate()
                            + " with http method " + uriTemplate.getHTTPVerb());
                }
            }
        }
        // Merge monetization properties: keep old values for any key the update left blank.
        APIProduct oldApi = getAPIProductbyUUID(product.getUuid(),
                CarbonContext.getThreadLocalCarbonContext().getTenantDomain());
        Gson gson = new Gson();
        Map<String, String> oldMonetizationProperties = gson.fromJson(oldApi.getMonetizationProperties().toString(),
                HashMap.class);
        if (oldMonetizationProperties != null && !oldMonetizationProperties.isEmpty()) {
            Map<String, String> newMonetizationProperties = gson.fromJson(product.getMonetizationProperties().toString(),
                    HashMap.class);
            if (newMonetizationProperties != null) {
                for (Map.Entry<String, String> entry : oldMonetizationProperties.entrySet()) {
                    String newValue = newMonetizationProperties.get(entry.getKey());
                    if (StringUtils.isAllBlank(newValue)) {
                        newMonetizationProperties.put(entry.getKey(), entry.getValue());
                    }
                }
                JSONParser parser = new JSONParser();
                try {
                    JSONObject jsonObj = (JSONObject) parser.parse(gson.toJson(newMonetizationProperties));
                    product.setMonetizationProperties(jsonObj);
                } catch (ParseException e) {
                    throw new APIManagementException("Error when parsing monetization properties ", e);
                }
            }
        }
        invalidateResourceCache(product.getContext(), product.getId().getVersion(), Collections.EMPTY_SET);
        // todo : check whether permissions need to be updated and pass it along
        updateApiProductArtifact(product, true, true);
        apiMgtDAO.updateAPIProduct(product, userNameWithoutChange);
        int productId = apiMgtDAO.getAPIProductId(product.getId());
        // Notify listeners (e.g. gateways) that the product changed.
        APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(),
                APIConstants.EventType.API_UPDATE.name(), tenantId, tenantDomain, product.getId().getName(), productId,
                product.getId().getUUID(), product.getId().getVersion(), product.getType(), product.getContext(),
                product.getId().getProviderName(),
APIConstants.LC_PUBLISH_LC_STATE);
        APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name());
        return apiToProductResourceMapping;
    }

    @Override
    public List<ResourcePath> getResourcePathsOfAPI(APIIdentifier apiId) throws APIManagementException {
        // Thin delegation to the DAO layer.
        return apiMgtDAO.getResourcePathsOfAPI(apiId);
    }

    /**
     * Rejects APIs whose lifecycle state cannot take part in an API Product.
     */
    private void validateApiLifeCycleForApiProducts(API api) throws APIManagementException {
        String status = api.getStatus();
        if (APIConstants.BLOCKED.equals(status) || APIConstants.PROTOTYPED.equals(status)
                || APIConstants.DEPRECATED.equals(status) || APIConstants.RETIRED.equals(status)) {
            throw new APIManagementException("Cannot create API Product using API with following status: " + status,
                    ExceptionCodes.from(ExceptionCodes.API_PRODUCT_WITH_UNSUPPORTED_LIFECYCLE_API, status));
        }
    }

    /**
     * Validates the name of api product against illegal characters and the length limits
     * of name/version/provider/context.
     *
     * @param product APIProduct info object
     * @throws APIManagementException if a field is missing, contains illegal characters, or is too long
     */
    private void validateApiProductInfo(APIProduct product) throws APIManagementException {
        String apiName = product.getId().getName();
        if (apiName == null) {
            handleException("API Name is required.");
        } else if (containsIllegals(apiName)) {
            handleException("API Name contains one or more illegal characters "
                    + "( " + APIConstants.REGEX_ILLEGAL_CHARACTERS_FOR_API_METADATA + " )");
        }
        // version is not a mandatory field for now
        if (!hasValidLength(apiName, APIConstants.MAX_LENGTH_API_NAME)
                || !hasValidLength(product.getId().getVersion(), APIConstants.MAX_LENGTH_VERSION)
                || !hasValidLength(product.getId().getProviderName(), APIConstants.MAX_LENGTH_PROVIDER)
                || !hasValidLength(product.getContext(), APIConstants.MAX_LENGTH_CONTEXT)) {
            throw new APIManagementException("Character length exceeds the allowable limit",
                    ExceptionCodes.LENGTH_EXCEEDS);
        }
    }

    /**
     * Create an Api Product
     *
     * @param apiProduct API Product
     * @throws APIManagementException if failed to create APIProduct
     */
    protected String createAPIProduct(APIProduct apiProduct) throws
APIManagementException { String apiProductUUID = null; // Validate Transports and Security validateAndSetTransports(apiProduct); validateAndSetAPISecurity(apiProduct); PublisherAPIProduct publisherAPIProduct = APIProductMapper.INSTANCE.toPublisherApiProduct(apiProduct); PublisherAPIProduct addedAPIProduct; try { publisherAPIProduct.setApiProductName(apiProduct.getId().getName()); publisherAPIProduct.setProviderName(apiProduct.getId().getProviderName()); publisherAPIProduct.setVersion(apiProduct.getId().getVersion()); addedAPIProduct = apiPersistenceInstance.addAPIProduct( new Organization(CarbonContext.getThreadLocalCarbonContext().getTenantDomain()), publisherAPIProduct); apiProductUUID = addedAPIProduct.getId(); } catch (APIPersistenceException e) { throw new APIManagementException("Error while creating API product ", e); } return apiProductUUID; } private void changeLifeCycleStatusToPublish(APIProductIdentifier apiIdentifier) throws APIManagementException { try { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(this.tenantDomain, true); String productArtifactId = registry.get(APIUtil.getAPIProductPath(apiIdentifier)).getUUID(); GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); GenericArtifact apiArtifact = artifactManager.getGenericArtifact(productArtifactId); if (apiArtifact != null) { apiArtifact.invokeAction("Publish", APIConstants.API_LIFE_CYCLE); if (log.isDebugEnabled()) { String logMessage = "API Product Status changed successfully. 
API Product Name: " + apiIdentifier.getName(); log.debug(logMessage); } } } catch (RegistryException e) { throw new APIManagementException("Error while Changing Lifecycle status of API Product " + apiIdentifier.getName(), e); } finally { PrivilegedCarbonContext.endTenantFlow(); } } /** * Update API Product Artifact in Registry * * @param apiProduct * @param updateMetadata * @param updatePermissions * @throws APIManagementException */ private void updateApiProductArtifact(APIProduct apiProduct, boolean updateMetadata, boolean updatePermissions) throws APIManagementException { //Validate Transports and Security validateAndSetTransports(apiProduct); validateAndSetAPISecurity(apiProduct); PublisherAPIProduct publisherAPIProduct = APIProductMapper.INSTANCE.toPublisherApiProduct(apiProduct); PublisherAPIProduct addedAPIProduct; try { publisherAPIProduct.setApiProductName(apiProduct.getId().getName()); publisherAPIProduct.setProviderName(apiProduct.getId().getProviderName()); publisherAPIProduct.setVersion(apiProduct.getId().getVersion()); addedAPIProduct = apiPersistenceInstance.updateAPIProduct( new Organization(CarbonContext.getThreadLocalCarbonContext().getTenantDomain()), publisherAPIProduct); } catch (APIPersistenceException e) { throw new APIManagementException("Error while creating API product "); } } public void updateProductResourceMappings(API api, String organization, List<APIProductResource> productResources) throws APIManagementException { //get uri templates of API again Map<String, URITemplate> apiResources = apiMgtDAO.getURITemplatesForAPI(api); for (APIProductResource productResource : productResources) { URITemplate uriTemplate = productResource.getUriTemplate(); String productResourceKey = uriTemplate.getHTTPVerb() + ":" + uriTemplate.getUriTemplate(); //set new uri template ID to the product resource int updatedURITemplateId = apiResources.get(productResourceKey).getId(); uriTemplate.setId(updatedURITemplateId); } 
apiMgtDAO.addAPIProductResourceMappings(productResources, organization, null);
    }

    /**
     * Create a product documentation
     *
     * @param product       APIProduct
     * @param documentation Documentation
     * @throws APIManagementException if failed to add documentation
     */
    private void createDocumentation(APIProduct product, Documentation documentation) throws APIManagementException {
        try {
            APIProductIdentifier productId = product.getId();
            GenericArtifactManager artifactManager = new GenericArtifactManager(registry,
                    APIConstants.DOCUMENTATION_KEY);
            GenericArtifact artifact = artifactManager.newGovernanceArtifact(new QName(documentation.getName()));
            artifactManager.addGenericArtifact(APIUtil.createDocArtifactContent(artifact, productId, documentation));
            String productPath = APIUtil.getAPIProductPath(productId);
            // Adding association from api to documentation . (API Product -----> doc)
            registry.addAssociation(productPath, artifact.getPath(), APIConstants.DOCUMENTATION_ASSOCIATION);
            String docVisibility = documentation.getVisibility().name();
            String[] authorizedRoles = getAuthorizedRoles(productPath);
            String visibility = product.getVisibility();
            // Shared/owner-only docs drop the role restriction and use the doc-level visibility instead.
            if (docVisibility != null) {
                if (APIConstants.DOC_SHARED_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_SHARED_VISIBILITY;
                } else if (APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_OWNER_VISIBILITY;
                }
            }
            APIUtil.setResourcePermissions(product.getId().getProviderName(),visibility, authorizedRoles, artifact
                    .getPath(), registry);
            String docFilePath = artifact.getAttribute(APIConstants.DOC_FILE_PATH);
            if (docFilePath != null && !StringUtils.EMPTY.equals(docFilePath)) {
                // The docFilePath comes as /t/tenantdomain/registry/resource/_system/governance/apimgt/applicationdata..
                // We need to remove the /t/tenantdomain/registry/resource/_system/governance section to set permissions.
int startIndex = docFilePath.indexOf(APIConstants.GOVERNANCE) + (APIConstants.GOVERNANCE).length();
                String filePath = docFilePath.substring(startIndex, docFilePath.length());
                APIUtil.setResourcePermissions(product.getId().getProviderName(),visibility, authorizedRoles, filePath,
                        registry);
                registry.addAssociation(artifact.getPath(), filePath, APIConstants.DOCUMENTATION_FILE_ASSOCIATION);
            }
            documentation.setId(artifact.getId());
        } catch (RegistryException e) {
            handleException("Failed to add documentation", e);
        } catch (UserStoreException e) {
            handleException("Failed to add documentation", e);
        }
    }

    /**
     * Updates a given api product documentation
     *
     * @param productId     APIProductIdentifier
     * @param documentation Documentation
     * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to update docs
     */
    public void updateDocumentation(APIProductIdentifier productId, Documentation documentation)
            throws APIManagementException {
        String productPath = APIUtil.getAPIProductPath(productId);
        APIProduct product = getAPIProduct(productPath);
        String docPath = APIUtil.getProductDocPath(productId) + documentation.getName();
        try {
            String docArtifactId = registry.get(docPath).getUUID();
            GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry,
                    APIConstants.DOCUMENTATION_KEY);
            GenericArtifact artifact = artifactManager.getGenericArtifact(docArtifactId);
            String docVisibility = documentation.getVisibility().name();
            String[] authorizedRoles = new String[0];
            String visibleRolesList = product.getVisibleRoles();
            if (visibleRolesList != null) {
                authorizedRoles = visibleRolesList.split(",");
            }
            String visibility = product.getVisibility();
            // Shared/owner-only docs drop the role restriction and use the doc-level visibility instead.
            if (docVisibility != null) {
                if (APIConstants.DOC_SHARED_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_SHARED_VISIBILITY;
                } else if (APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_OWNER_VISIBILITY;
                }
            }
GenericArtifact updateDocArtifact = APIUtil.createDocArtifactContent(artifact, productId, documentation);
            artifactManager.updateGenericArtifact(updateDocArtifact);
            // Reset and re-apply registry permissions to reflect the (possibly changed) visibility.
            APIUtil.clearResourcePermissions(docPath, productId, ((UserRegistry) registry).getTenantId());
            APIUtil.setResourcePermissions(product.getId().getProviderName(), visibility, authorizedRoles,
                    artifact.getPath(), registry);
            String docFilePath = artifact.getAttribute(APIConstants.DOC_FILE_PATH);
            if (docFilePath != null && !"".equals(docFilePath)) {
                // The docFilePath comes as
                // /t/tenantdomain/registry/resource/_system/governance/apimgt/applicationdata..
                // We need to remove the
                // /t/tenantdomain/registry/resource/_system/governance section
                // to set permissions.
                int startIndex = docFilePath.indexOf(APIConstants.GOVERNANCE) + (APIConstants.GOVERNANCE).length();
                String filePath = docFilePath.substring(startIndex, docFilePath.length());
                APIUtil.setResourcePermissions(product.getId().getProviderName(), visibility, authorizedRoles,
                        filePath, registry);
            }
        } catch (RegistryException e) {
            handleException("Failed to update documentation", e);
        }
    }

    /**
     * Add a file to a product document of source type FILE
     *
     * @param productId     APIProduct identifier the document belongs to
     * @param documentation document
     * @param filename      name of the file
     * @param content       content of the file as an Input Stream
     * @param contentType   content type of the file
     * @throws APIManagementException if failed to add the file
     */
    public void addFileToProductDocumentation(APIProductIdentifier productId, Documentation documentation,
            String filename, InputStream content, String contentType) throws APIManagementException {
        if (Documentation.DocumentSourceType.FILE.equals(documentation.getSourceType())) {
            // Force a download rather than inline rendering in the browser.
            contentType = "application/force-download";
            ResourceFile icon = new ResourceFile(content, contentType);
            String filePath = APIUtil.getDocumentationFilePath(productId, filename);
            APIProduct apiProduct;
            try {
                apiProduct = getAPIProduct(productId);
String visibleRolesList = apiProduct.getVisibleRoles();
                String[] visibleRoles = new String[0];
                if (visibleRolesList != null) {
                    visibleRoles = visibleRolesList.split(",");
                }
                APIUtil.setResourcePermissions(apiProduct.getId().getProviderName(), apiProduct.getVisibility(),
                        visibleRoles, filePath, registry);
                documentation.setFilePath(addResourceFile(productId, filePath, icon));
                APIUtil.setFilePermission(filePath);
            } catch (APIManagementException e) {
                handleException("Failed to add file to product document " + documentation.getName(), e);
            }
        } else {
            String errorMsg = "Cannot add file to the Product Document. Document " + documentation.getName()
                    + "'s Source type is not FILE.";
            handleException(errorMsg);
        }
    }

    /**
     * This method used to save the product documentation content
     *
     * @param apiProduct,        API Product
     * @param documentationName, name of the inline documentation
     * @param text,              content of the inline documentation
     * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to add the document as a resource to registry
     */
    public void addProductDocumentationContent(APIProduct apiProduct, String documentationName, String text)
            throws APIManagementException {
        APIProductIdentifier identifier = apiProduct.getId();
        String documentationPath = APIUtil.getProductDocPath(identifier) + documentationName;
        String contentPath = APIUtil.getProductDocPath(identifier) + APIConstants.INLINE_DOCUMENT_CONTENT_DIR
                + RegistryConstants.PATH_SEPARATOR + documentationName;
        boolean isTenantFlowStarted = false;
        try {
            // Run inside a tenant flow when operating on a (non-super) tenant.
            if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
                PrivilegedCarbonContext.startTenantFlow();
                isTenantFlowStarted = true;
                PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
            }
            Resource docResource = registry.get(documentationPath);
            GenericArtifactManager artifactManager = new GenericArtifactManager(registry,
                    APIConstants.DOCUMENTATION_KEY);
            GenericArtifact docArtifact =
artifactManager.getGenericArtifact(docResource.getUUID());
            Documentation doc = APIUtil.getDocumentation(docArtifact);
            Resource docContent;
            if (!registry.resourceExists(contentPath)) {
                docContent = registry.newResource();
            } else {
                docContent = registry.get(contentPath);
            }
            /* This is a temporary fix for doc content replace issue. We need to add
             * separate methods to add inline content resource in document update */
            if (!APIConstants.NO_CONTENT_UPDATE.equals(text)) {
                docContent.setContent(text);
            }
            docContent.setMediaType(APIConstants.DOCUMENTATION_INLINE_CONTENT_TYPE);
            registry.put(contentPath, docContent);
            // Link the content resource to its documentation artifact.
            registry.addAssociation(documentationPath, contentPath, APIConstants.DOCUMENTATION_CONTENT_ASSOCIATION);
            String productPath = APIUtil.getAPIProductPath(identifier);
            String[] authorizedRoles = getAuthorizedRoles(productPath);
            String docVisibility = doc.getVisibility().name();
            String visibility = apiProduct.getVisibility();
            // Shared/owner-only docs drop the role restriction and use the doc-level visibility instead.
            if (docVisibility != null) {
                if (APIConstants.DOC_SHARED_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_SHARED_VISIBILITY;
                } else if (APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_OWNER_VISIBILITY;
                }
            }
            APIUtil.setResourcePermissions(apiProduct.getId().getProviderName(),visibility, authorizedRoles,contentPath,
                    registry);
        } catch (RegistryException e) {
            String msg = "Failed to add the documentation content of : " + documentationName + " of API Product :"
                    + identifier.getName();
            handleException(msg, e);
        } catch (UserStoreException e) {
            String msg = "Failed to add the documentation content of : " + documentationName + " of API Product :"
                    + identifier.getName();
            handleException(msg, e);
        } finally {
            if (isTenantFlowStarted) {
                PrivilegedCarbonContext.endTenantFlow();
            }
        }
    }

    @Override
    public String getGraphqlSchema(APIIdentifier apiId) throws APIManagementException {
        // Thin delegation; schema retrieval happens in getGraphqlSchemaDefinition.
        return getGraphqlSchemaDefinition(apiId);
    }

    /**
     * Check
 * whether the given scope name exists as a shared scope in the tenant domain.
     *
     * @param scopeName Shared Scope name
     * @param tenantId  Tenant Id
     * @return Scope availability
     * @throws APIManagementException if failed to check the availability
     */
    @Override
    public boolean isSharedScopeNameExists(String scopeName, int tenantId) throws APIManagementException {
        if (log.isDebugEnabled()) {
            log.debug("Checking whether scope name: " + scopeName + " exists as a shared scope in tenant with ID: "
                    + tenantId);
        }
        return ApiMgtDAO.getInstance().isSharedScopeExists(scopeName, tenantId);
    }

    /**
     * Add Shared Scope by registering it in the KM and adding the scope as a Shared Scope in AM DB.
     *
     * @param scope        Shared Scope
     * @param tenantDomain Tenant domain
     * @return UUId of the added Shared Scope object
     * @throws APIManagementException if failed to add a scope
     */
    @Override
    public String addSharedScope(Scope scope, String tenantDomain) throws APIManagementException {
        Set<Scope> scopeSet = new HashSet<>();
        scopeSet.add(scope);
        int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain);
        addScopes(scopeSet, tenantId);
        // Best-effort registration with every configured Key Manager; failures are logged, not fatal.
        Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain);
        for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) {
            KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager();
            if (keyManager != null) {
                try {
                    keyManager.registerScope(scope);
                } catch (APIManagementException e) {
                    log.error("Error occurred while registering Scope in Key Manager "
                            + keyManagerDtoEntry.getKey(), e);
                }
            }
            if (log.isDebugEnabled()) {
                log.debug("Adding shared scope mapping: " + scope.getKey() + " to Key Manager : "
                        + keyManagerDtoEntry.getKey());
            }
        }
        return ApiMgtDAO.getInstance().addSharedScope(scope, tenantDomain);
    }

    /**
     * Get all available shared scopes.
 *
     * @param tenantDomain tenant domain
     * @return Shared Scope list
     * @throws APIManagementException if failed to get the scope list
     */
    @Override
    public List<Scope> getAllSharedScopes(String tenantDomain) throws APIManagementException {
        if (log.isDebugEnabled()) {
            log.debug("Retrieving all the shared scopes for tenant: " + tenantDomain);
        }
        // Get all shared scopes
        List<Scope> allSharedScopes = ApiMgtDAO.getInstance().getAllSharedScopes(tenantDomain);
        // Get all scopes from KM
        List<Scope> allScopes = scopesDAO.getScopes(APIUtil.getTenantIdFromTenantDomain(tenantDomain));
        // Enrich each shared scope with display name, description and roles from the KM copy.
        for (Scope scope : allSharedScopes) {
            for (Scope tempScope : allScopes) {
                if (scope.getKey().equals(tempScope.getKey())) {
                    scope.setName(tempScope.getName());
                    scope.setDescription(tempScope.getDescription());
                    scope.setRoles(tempScope.getRoles());
                    break;
                }
            }
        }
        return allSharedScopes;
    }

    /**
     * Get all available shared scope keys.
     *
     * @param tenantDomain tenant domain
     * @return Shared Scope Keyset
     * @throws APIManagementException if failed to get the scope key set
     */
    @Override
    public Set<String> getAllSharedScopeKeys(String tenantDomain) throws APIManagementException {
        // Get all shared scope keys
        return ApiMgtDAO.getInstance().getAllSharedScopeKeys(tenantDomain);
    }

    /**
     * Get shared scope by UUID.
* * @param sharedScopeId Shared scope Id * @param tenantDomain tenant domain * @return Shared Scope * @throws APIManagementException If failed to get the scope */ @Override public Scope getSharedScopeByUUID(String sharedScopeId, String tenantDomain) throws APIManagementException { Scope sharedScope; if (log.isDebugEnabled()) { log.debug("Retrieving shared scope: " + sharedScopeId); } String scopeKey = ApiMgtDAO.getInstance().getSharedScopeKeyByUUID(sharedScopeId); if (scopeKey != null) { sharedScope = scopesDAO.getScope(scopeKey, APIUtil.getTenantIdFromTenantDomain(tenantDomain)); sharedScope.setId(sharedScopeId); } else { throw new APIMgtResourceNotFoundException("Shared Scope not found for scope ID: " + sharedScopeId, ExceptionCodes.from(ExceptionCodes.SHARED_SCOPE_NOT_FOUND, sharedScopeId)); } return sharedScope; } /** * Delete shared scope. * * @param scopeName Shared scope name * @param tenantDomain tenant domain * @throws APIManagementException If failed to delete the scope */ @Override public void deleteSharedScope(String scopeName, String tenantDomain) throws APIManagementException { if (log.isDebugEnabled()) { log.debug("Deleting shared scope " + scopeName); } Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain); for (Map.Entry<String, KeyManagerDto> keyManagerEntry : tenantKeyManagers.entrySet()) { KeyManager keyManager = keyManagerEntry.getValue().getKeyManager(); if (keyManager != null) { try { keyManager.deleteScope(scopeName); } catch (APIManagementException e) { log.error("Error while Deleting Shared Scope " + scopeName + " from Key Manager " + keyManagerEntry.getKey(), e); } } } apiMgtDAO.deleteSharedScope(scopeName, tenantDomain); deleteScope(scopeName, APIUtil.getTenantIdFromTenantDomain(tenantDomain)); } /** * Update a shared scope. 
* * @param sharedScope Shared Scope * @param tenantDomain tenant domain * @throws APIManagementException If failed to update */ @Override public void updateSharedScope(Scope sharedScope, String tenantDomain) throws APIManagementException { int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain); Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain); for (Map.Entry<String, KeyManagerDto> keyManagerEntry : tenantKeyManagers.entrySet()) { KeyManager keyManager = keyManagerEntry.getValue().getKeyManager(); if (keyManager != null) { try { keyManager.updateScope(sharedScope); } catch (APIManagementException e) { log.error("Error while Updating Shared Scope " + sharedScope.getKey() + " from Key Manager " + keyManagerEntry.getKey(), e); } } } updateScope(sharedScope, tenantId); } /** * Validate a shared scopes set. Add the additional attributes (scope description, bindings etc). * * @param scopes Shared scopes set * @throws APIManagementException If failed to validate */ @Override public void validateSharedScopes(Set<Scope> scopes, String tenantDomain) throws APIManagementException { Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain); for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) { KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager(); if (keyManager != null) { keyManager.validateScopes(scopes); } } } @Override /** * Get the API and URI usages of the given shared scope * * @param uuid UUID of the shared scope * @param tenantId ID of the Tenant domain * @throws APIManagementException If failed to validate */ public SharedScopeUsage getSharedScopeUsage(String uuid, int tenantId) throws APIManagementException { return ApiMgtDAO.getInstance().getSharedScopeUsage(uuid, tenantId); } /** * This method returns the security audit properties * * @param userId user id * @return JSONObject security audit properties * @throws 
 *         APIManagementException
 */
public JSONObject getSecurityAuditAttributesFromConfig(String userId) throws APIManagementException {

    String tenantDomain = MultitenantUtils.getTenantDomain(userId);
    JSONObject securityAuditConfig = APIUtil.getSecurityAuditAttributesFromRegistry(tenantDomain);
    if (securityAuditConfig != null) {
        // Tenant-level registry config wins only when the "override global" flag is an explicit Boolean true.
        if ((securityAuditConfig.get(APIConstants.SECURITY_AUDIT_OVERRIDE_GLOBAL) != null)
                && securityAuditConfig.get(APIConstants.SECURITY_AUDIT_OVERRIDE_GLOBAL) instanceof Boolean
                && (Boolean) securityAuditConfig.get(APIConstants.SECURITY_AUDIT_OVERRIDE_GLOBAL)) {
            String apiToken = (String) securityAuditConfig.get(APIConstants.SECURITY_AUDIT_API_TOKEN);
            String collectionId = (String) securityAuditConfig.get(APIConstants.SECURITY_AUDIT_COLLECTION_ID);
            JSONObject tenantProperties = new JSONObject();
            if (StringUtils.isNotEmpty(apiToken) && StringUtils.isNotEmpty(collectionId)) {
                tenantProperties.put(APIConstants.SECURITY_AUDIT_API_TOKEN, apiToken);
                tenantProperties.put(APIConstants.SECURITY_AUDIT_COLLECTION_ID, collectionId);
                return tenantProperties;
            }
            // NOTE(review): when override is set but token/collection are empty, this falls through
            // and returns null rather than the global config — confirm this is intentional.
        } else {
            return getSecurityAuditConfigurationProperties(tenantDomain);
        }
    } else {
        return getSecurityAuditConfigurationProperties(tenantDomain);
    }
    return null;
}

@Override
public void saveAsyncApiDefinition(API api, String jsonText) throws APIManagementException {

    String apiId;
    String organization = api.getOrganization();
    // Resolve the API UUID: prefer the value on the API object, then its identifier, else look it up in the DB.
    if (api.getUuid() != null) {
        apiId = api.getUuid();
    } else if (api.getId().getUUID() != null) {
        apiId = api.getId().getUUID();
    } else {
        apiId = apiMgtDAO.getUUIDFromIdentifier(api.getId().getProviderName(), api.getId().getApiName(),
                api.getId().getVersion(), organization);
    }
    try {
        apiPersistenceInstance.saveAsyncDefinition(new Organization(organization), apiId, jsonText);
    } catch (AsyncSpecPersistenceException e) {
        throw new APIManagementException("Error while persisting Async API definition ", e);
    }
}

/**
 * This method returns security audit properties from the API Manager Configuration
 *
 * @param tenantDomain tenant domain name
 * @return JSONObject security audit properties
 */
private JSONObject getSecurityAuditConfigurationProperties(String tenantDomain) {

    APIManagerConfiguration configuration = ServiceReferenceHolder.getInstance()
            .getAPIManagerConfigurationService().getAPIManagerConfiguration();
    String apiToken = configuration.getFirstProperty(APIConstants.API_SECURITY_AUDIT_API_TOKEN);
    String collectionId = configuration.getFirstProperty(APIConstants.API_SECURITY_AUDIT_CID);
    String baseUrl = configuration.getFirstProperty(APIConstants.API_SECURITY_AUDIT_BASE_URL);
    boolean isGlobal = Boolean.parseBoolean(configuration.getFirstProperty(APIConstants.API_SECURITY_AUDIT_GLOBAL));
    JSONObject configProperties = new JSONObject();
    if (StringUtils.isNotEmpty(apiToken) && StringUtils.isNotEmpty(collectionId)) {
        configProperties.put(APIConstants.SECURITY_AUDIT_API_TOKEN, apiToken);
        configProperties.put(APIConstants.SECURITY_AUDIT_COLLECTION_ID, collectionId);
        configProperties.put(APIConstants.SECURITY_AUDIT_BASE_URL, baseUrl);
        // A "global" config applies to every tenant; otherwise only the super tenant may use it.
        if (isGlobal || "carbon.super".equals(tenantDomain)) {
            return configProperties;
        } else {
            return null;
        }
    }
    return null;
}

@Override
public List<APIResource> getRemovedProductResources(Set<URITemplate> updatedUriTemplates, API existingAPI) {

    Set<URITemplate> existingUriTemplates = existingAPI.getUriTemplates();
    List<APIResource> removedReusedResources = new ArrayList<>();

    for (URITemplate existingUriTemplate : existingUriTemplates) {
        // If existing URITemplate is used by any API Products
        if (!existingUriTemplate.retrieveUsedByProducts().isEmpty()) {
            String existingVerb = existingUriTemplate.getHTTPVerb();
            String existingPath = existingUriTemplate.getUriTemplate();
            boolean isReusedResourceRemoved = true;

            for (URITemplate updatedUriTemplate : updatedUriTemplates) {
                String updatedVerb = updatedUriTemplate.getHTTPVerb();
                String updatedPath = updatedUriTemplate.getUriTemplate();

                //Check if existing reused resource is among updated resources
if (existingVerb.equalsIgnoreCase(updatedVerb) && existingPath.equalsIgnoreCase(updatedPath)) { isReusedResourceRemoved = false; break; } } // Existing reused resource is not among updated resources if (isReusedResourceRemoved) { APIResource removedResource = new APIResource(existingVerb, existingPath); removedReusedResources.add(removedResource); } } } return removedReusedResources; } private void addScopes(Set<Scope> scopes, int tenantId) throws APIManagementException { if (scopes != null) { scopesDAO.addScopes(scopes, tenantId); for (Scope scope : scopes) { ScopeEvent scopeEvent = new ScopeEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.SCOPE_CREATE.name(), tenantId, tenantDomain, scope.getKey(), scope.getName(), scope.getDescription()); if (StringUtils.isNotEmpty(scope.getRoles()) && scope.getRoles().trim().length() > 0) { scopeEvent.setRoles(Arrays.asList(scope.getRoles().split(","))); } APIUtil.sendNotification(scopeEvent, APIConstants.NotifierType.SCOPE.name()); } } } private void updateScope(Scope scope, int tenantId) throws APIManagementException { if (scope != null) { scopesDAO.updateScope(scope, tenantId); ScopeEvent scopeEvent = new ScopeEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.SCOPE_UPDATE.name(), tenantId, tenantDomain, scope.getKey(), scope.getName(), scope.getDescription()); if (StringUtils.isNotEmpty(scope.getRoles()) && scope.getRoles().trim().length() > 0) { scopeEvent.setRoles(Arrays.asList(scope.getRoles().split(","))); } APIUtil.sendNotification(scopeEvent, APIConstants.NotifierType.SCOPE.name()); } } private void deleteScope(String scopeKey, int tenantId) throws APIManagementException { if (StringUtils.isNotEmpty(scopeKey)) { scopesDAO.deleteScope(scopeKey, tenantId); ScopeEvent scopeEvent = new ScopeEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.SCOPE_DELETE.name(), tenantId, tenantDomain, scopeKey, null, null); 
        APIUtil.sendNotification(scopeEvent, APIConstants.NotifierType.SCOPE.name());
    }
}

// Deletes each scope key in the set via deleteScope(..). No-op for a null set.
private void deleteScopes(Set<String> scopes, int tenantId) throws APIManagementException {

    if (scopes != null) {
        for (String scope : scopes) {
            deleteScope(scope, tenantId);
        }
    }
}

@Override
public API getAPIbyUUID(String uuid, String organization) throws APIManagementException {

    Organization org = new Organization(organization);
    try {
        PublisherAPI publisherAPI = apiPersistenceInstance.getPublisherAPI(org, uuid);
        if (publisherAPI != null) {
            API api = APIMapper.INSTANCE.toApi(publisherAPI);
            APIIdentifier apiIdentifier = api.getId();
            apiIdentifier.setUuid(uuid);
            api.setId(apiIdentifier);
            // Throws if the current user is not allowed to see this API.
            checkAccessControlPermission(userNameWithoutChange, api.getAccessControl(),
                    api.getAccessControlRoles());
            /////////////////// Do processing on the data object//////////
            populateRevisionInformation(api, uuid);
            populateAPIInformation(uuid, organization, api);
            loadMediationPoliciesToAPI(api, organization);
            populateAPIStatus(api);
            populateDefaultVersion(api);
            return api;
        } else {
            String msg = "Failed to get API. API artifact corresponding to artifactId " + uuid + " does not exist";
            throw new APIMgtResourceNotFoundException(msg);
        }
    } catch (APIPersistenceException e) {
        throw new APIManagementException("Failed to get API", e);
    } catch (OASPersistenceException e) {
        throw new APIManagementException("Error while retrieving the OAS definition", e);
    } catch (ParseException e) {
        throw new APIManagementException("Error while parsing the OAS definition", e);
    } catch (AsyncSpecPersistenceException e) {
        throw new APIManagementException("Error while retrieving the Async API definition", e);
    }
}

// Populates the product-level throttling tier for a revisioned API Product.
private void populateAPITier(APIProduct apiProduct) throws APIManagementException {

    if (apiProduct.isRevision()) {
        String apiLevelTier = apiMgtDAO.getAPILevelTier(apiProduct.getRevisionedApiProductId(),
                apiProduct.getUuid());
        apiProduct.setProductLevelPolicy(apiLevelTier);
    }
}

// If the given UUID is a revision UUID, marks the API as a revision and records
// the parent API UUID and revision id on it.
private void populateRevisionInformation(API api, String revisionUUID) throws APIManagementException {

    APIRevision apiRevision = apiMgtDAO.checkAPIUUIDIsARevisionUUID(revisionUUID);
    if (apiRevision != null && !StringUtils.isEmpty(apiRevision.getApiUUID())) {
        api.setRevision(true);
        api.setRevisionedApiId(apiRevision.getApiUUID());
        api.setRevisionId(apiRevision.getId());
    }
}

// If the given UUID is a revision UUID, marks the API Product as a revision and records
// the parent product UUID and revision id on it.
private void populateRevisionInformation(APIProduct apiProduct, String revisionUUID) throws APIManagementException {

    APIRevision apiRevision = apiMgtDAO.checkAPIUUIDIsARevisionUUID(revisionUUID);
    if (apiRevision != null && !StringUtils.isEmpty(apiRevision.getApiUUID())) {
        apiProduct.setRevision(true);
        apiProduct.setRevisionedApiProductId(apiRevision.getApiUUID());
        apiProduct.setRevisionId(apiRevision.getId());
    }
}

// Sets the lifecycle status; a revision resolves to its parent API's status.
private void populateAPIStatus(API api) throws APIManagementException {

    if (api.isRevision()) {
        api.setStatus(apiMgtDAO.getAPIStatusFromAPIUUID(api.getRevisionedApiId()));
    } else {
        api.setStatus(apiMgtDAO.getAPIStatusFromAPIUUID(api.getUuid()));
    }
}

// Sets the lifecycle state; a revision resolves to its parent API Product's status.
private void populateAPIStatus(APIProduct apiProduct) throws APIManagementException {

    if (apiProduct.isRevision()) {
        apiProduct.setState(apiMgtDAO.getAPIStatusFromAPIUUID(apiProduct.getRevisionedApiProductId()));
    } else {
        apiProduct.setState(apiMgtDAO.getAPIStatusFromAPIUUID(apiProduct.getUuid()));
    }
}

public APIProduct getAPIProductbyUUID(String uuid, String organization) throws APIManagementException {

    try {
        Organization org = new Organization(organization);
        PublisherAPIProduct publisherAPIProduct = apiPersistenceInstance.getPublisherAPIProduct(org, uuid);
        if (publisherAPIProduct != null) {
            APIProduct product = APIProductMapper.INSTANCE.toApiProduct(publisherAPIProduct);
            product.setID(new APIProductIdentifier(publisherAPIProduct.getProviderName(),
                    publisherAPIProduct.getApiProductName(), publisherAPIProduct.getVersion(), uuid));
            // Throws if the current user is not allowed to see this API Product.
            checkAccessControlPermission(userNameWithoutChange, product.getAccessControl(),
                    product.getAccessControlRoles());
            populateAPIProductInformation(uuid, organization, product);
            populateRevisionInformation(product, uuid);
            populateAPIStatus(product);
            populateAPITier(product);
            return product;
        } else {
            String msg = "Failed to get API Product. API Product artifact corresponding to artifactId " + uuid
                    + " does not exist";
            throw new APIMgtResourceNotFoundException(msg);
        }
    } catch (APIPersistenceException | OASPersistenceException | ParseException e) {
        String msg = "Failed to get API Product";
        throw new APIManagementException(msg, e);
    }
}

@Override
public Map<String, Object> searchPaginatedAPIs(String searchQuery, String organization, int start, int end,
        String sortBy, String sortOrder) throws APIManagementException {

    Map<String, Object> result = new HashMap<String, Object>();
    if (log.isDebugEnabled()) {
        log.debug("Original search query received : " + searchQuery);
    }
    Organization org = new Organization(organization);
    String[] roles = APIUtil.getFilteredUserRoles(userNameWithoutChange);
    Map<String, Object> properties = APIUtil.getUserProperties(userNameWithoutChange);
    UserContext userCtx = new UserContext(userNameWithoutChange, org, properties, roles);
    try {
        PublisherAPISearchResult searchAPIs = apiPersistenceInstance.searchAPIsForPublisher(org, searchQuery,
                start, end, userCtx, sortBy, sortOrder);
        if (log.isDebugEnabled()) {
            log.debug("searched APIs for query : " + searchQuery + " :-->: " + searchAPIs.toString());
        }
        Set<Object> apiSet = new LinkedHashSet<>();
        if (searchAPIs != null) {
            List<PublisherAPIInfo> list = searchAPIs.getPublisherAPIInfoList();
            List<Object> apiList = new ArrayList<>();
            for (PublisherAPIInfo publisherAPIInfo : list) {
                API mappedAPI = APIMapper.INSTANCE.toApi(publisherAPIInfo);
                populateAPIStatus(mappedAPI);
                populateDefaultVersion(mappedAPI);
                apiList.add(mappedAPI);
            }
            apiSet.addAll(apiList);
            result.put("apis", apiSet);
            result.put("length", searchAPIs.getTotalAPIsCount());
            result.put("isMore", true);
        } else {
            result.put("apis", apiSet);
            result.put("length", 0);
            result.put("isMore", false);
        }
    } catch (APIPersistenceException e) {
        throw new APIManagementException("Error while searching the api ", e);
    }
    return result ;
}

// Comment CRUD operations delegate straight to the DAO layer.
@Override
public String addComment(String uuid, Comment comment, String user) throws APIManagementException {
    return apiMgtDAO.addComment(uuid, comment, user);
}

@Override
public Comment getComment(ApiTypeWrapper apiTypeWrapper, String commentId, Integer replyLimit,
        Integer replyOffset) throws APIManagementException {
    return apiMgtDAO.getComment(apiTypeWrapper, commentId, replyLimit, replyOffset);
}

@Override
public org.wso2.carbon.apimgt.api.model.CommentList getComments(ApiTypeWrapper apiTypeWrapper,
        String parentCommentID, Integer replyLimit, Integer replyOffset) throws APIManagementException {
    return apiMgtDAO.getComments(apiTypeWrapper, parentCommentID, replyLimit, replyOffset);
}

@Override
public boolean editComment(ApiTypeWrapper apiTypeWrapper, String commentId, Comment comment)
        throws APIManagementException {
    return apiMgtDAO.editComment(apiTypeWrapper, commentId, comment);
}

@Override
public boolean deleteComment(ApiTypeWrapper apiTypeWrapper, String commentId) throws APIManagementException {
    return apiMgtDAO.deleteComment(apiTypeWrapper, commentId);
}

/**
 * Get minimal details of API by registry artifact id
 *
 * @param uuid         Registry artifact id
 * @param organization identifier of the organization
 * @return API of the provided artifact id
 * @throws APIManagementException
 */
@Override
public API getLightweightAPIByUUID(String uuid, String organization) throws APIManagementException {

    try {
        Organization org = new Organization(organization);
        PublisherAPI publisherAPI = apiPersistenceInstance.getPublisherAPI(org, uuid);
        if (publisherAPI != null) {
            API api = APIMapper.INSTANCE.toApi(publisherAPI);
            checkAccessControlPermission(userNameWithoutChange, api.getAccessControl(),
                    api.getAccessControlRoles());
            /// populate relevant external info
            // environment
            String environmentString = null;
            if (api.getEnvironments() != null) {
                environmentString = String.join(",", api.getEnvironments());
            }
            api.setEnvironments(APIUtil.extractEnvironmentsForAPI(environmentString));
            //CORS .
if null is returned, set default config from the configuration if (api.getCorsConfiguration() == null) { api.setCorsConfiguration(APIUtil.getDefaultCorsConfiguration()); } api.setOrganization(organization); return api; } else { String msg = "Failed to get API. API artifact corresponding to artifactId " + uuid + " does not exist"; throw new APIMgtResourceNotFoundException(msg); } } catch (APIPersistenceException e) { String msg = "Failed to get API with uuid " + uuid; throw new APIManagementException(msg, e); } } @Override public List<APIResource> getUsedProductResources(String uuid) throws APIManagementException { List<APIResource> usedProductResources = new ArrayList<>(); Map<Integer, URITemplate> uriTemplates = ApiMgtDAO.getInstance().getURITemplatesOfAPIWithProductMapping(uuid); for (URITemplate uriTemplate : uriTemplates.values()) { // If existing URITemplate is used by any API Products if (!uriTemplate.retrieveUsedByProducts().isEmpty()) { APIResource apiResource = new APIResource(uriTemplate.getHTTPVerb(), uriTemplate.getUriTemplate()); usedProductResources.add(apiResource); } } return usedProductResources; } @Override public void addDocumentationContent(String uuid, String docId, String organization, DocumentationContent content) throws APIManagementException { DocumentContent mappedContent = null; try { mappedContent = DocumentMapper.INSTANCE.toDocumentContent(content); DocumentContent doc = apiPersistenceInstance.addDocumentationContent(new Organization(organization), uuid, docId, mappedContent); } catch (DocumentationPersistenceException e) { throw new APIManagementException("Error while adding content to doc " + docId); } } @Override public void addWSDLResource(String apiId, ResourceFile resource, String url, String organization) throws APIManagementException { if (!StringUtils.isEmpty(url)) { URL wsdlUrl; try { wsdlUrl = new URL(url); } catch (MalformedURLException e) { throw new APIManagementException("Invalid/Malformed WSDL URL : " + url, e, 
ExceptionCodes.INVALID_WSDL_URL_EXCEPTION); } // Get the WSDL 1.1 or 2.0 processor and process the content based on the version WSDLProcessor wsdlProcessor = APIMWSDLReader.getWSDLProcessorForUrl(wsdlUrl); InputStream wsdlContent = wsdlProcessor.getWSDL(); // wsdlResource.setContentStream(wsdlContent); org.wso2.carbon.apimgt.persistence.dto.ResourceFile wsdlResourceFile = new org.wso2.carbon.apimgt.persistence.dto.ResourceFile( wsdlContent, null); try { apiPersistenceInstance.saveWSDL( new Organization(organization), apiId, wsdlResourceFile); } catch (WSDLPersistenceException e) { throw new APIManagementException("Error while adding WSDL to api " + apiId, e); } } else if (resource != null) { org.wso2.carbon.apimgt.persistence.dto.ResourceFile wsdlResourceFile = new org.wso2.carbon.apimgt.persistence.dto.ResourceFile( resource.getContent(), resource.getContentType()); try { apiPersistenceInstance.saveWSDL( new Organization(organization), apiId, wsdlResourceFile); } catch (WSDLPersistenceException e) { throw new APIManagementException("Error while adding WSDL to api " + apiId, e); } } } @Override public Map<String, Object> searchPaginatedContent(String searchQuery, String organization, int start, int end) throws APIManagementException { ArrayList<Object> compoundResult = new ArrayList<Object>(); Map<Documentation, API> docMap = new HashMap<Documentation, API>(); Map<Documentation, APIProduct> productDocMap = new HashMap<Documentation, APIProduct>(); Map<String, Object> result = new HashMap<String, Object>(); SortedSet<API> apiSet = new TreeSet<API>(new APINameComparator()); SortedSet<APIProduct> apiProductSet = new TreeSet<APIProduct>(new APIProductNameComparator()); String userame = userNameWithoutChange; Organization org = new Organization(organization); Map<String, Object> properties = APIUtil.getUserProperties(userame); String[] roles = APIUtil.getFilteredUserRoles(userame); UserContext ctx = new UserContext(userame, org, properties, roles); try { 
PublisherContentSearchResult results = apiPersistenceInstance.searchContentForPublisher(org, searchQuery, start, end, ctx); if (results != null) { List<SearchContent> resultList = results.getResults(); for (SearchContent item : resultList) { if ("API".equals(item.getType())) { PublisherSearchContent publiserAPI = (PublisherSearchContent) item; API api = new API(new APIIdentifier(publiserAPI.getProvider(), publiserAPI.getName(), publiserAPI.getVersion())); api.setUuid(publiserAPI.getId()); api.setContext(publiserAPI.getContext()); api.setContextTemplate(publiserAPI.getContext()); api.setStatus(publiserAPI.getStatus()); apiSet.add(api); } else if ("APIProduct".equals(item.getType())) { PublisherSearchContent publiserAPI = (PublisherSearchContent) item; APIProduct api = new APIProduct(new APIProductIdentifier(publiserAPI.getProvider(), publiserAPI.getName(), publiserAPI.getVersion())); api.setUuid(publiserAPI.getId()); api.setContextTemplate(publiserAPI.getContext()); api.setState(publiserAPI.getStatus()); apiProductSet.add(api); } else if (item instanceof DocumentSearchContent) { // doc item DocumentSearchContent docItem = (DocumentSearchContent) item; Documentation doc = new Documentation( DocumentationType.valueOf(docItem.getDocType().toString()), docItem.getName()); doc.setSourceType(DocumentSourceType.valueOf(docItem.getSourceType().toString())); doc.setVisibility(DocumentVisibility.valueOf(docItem.getVisibility().toString())); doc.setId(docItem.getId()); if ("API".equals(docItem.getAssociatedType())) { API api = new API(new APIIdentifier(docItem.getApiProvider(), docItem.getApiName(), docItem.getApiVersion())); api.setUuid(docItem.getApiUUID()); docMap.put(doc, api); } else if ("APIProduct".equals(docItem.getAssociatedType())) { APIProduct api = new APIProduct(new APIProductIdentifier(docItem.getApiProvider(), docItem.getApiName(), docItem.getApiVersion())); api.setUuid(docItem.getApiUUID()); productDocMap.put(doc, api); } } } compoundResult.addAll(apiSet); 
            compoundResult.addAll(apiProductSet);
            compoundResult.addAll(docMap.entrySet());
            compoundResult.addAll(productDocMap.entrySet());
            // Order the merged APIs/products/docs by name for a stable listing.
            compoundResult.sort(new ContentSearchResultNameComparator());
            result.put("length", results.getTotalCount() );
        } else {
            result.put("length", compoundResult.size() );
        }
    } catch (APIPersistenceException e) {
        throw new APIManagementException("Error while searching content ", e);
    }
    result.put("apis", compoundResult);
    return result;
}

@Override
public void setThumbnailToAPI(String apiId, ResourceFile resource, String organization)
        throws APIManagementException {

    try {
        org.wso2.carbon.apimgt.persistence.dto.ResourceFile iconResourceFile =
                new org.wso2.carbon.apimgt.persistence.dto.ResourceFile(resource.getContent(),
                        resource.getContentType());
        apiPersistenceInstance.saveThumbnail(new Organization(organization), apiId, iconResourceFile);
    } catch (ThumbnailPersistenceException e) {
        if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) {
            throw new APIMgtResourceNotFoundException(e);
        } else {
            throw new APIManagementException("Error while saving thumbnail ", e);
        }
    }
}

@Override
public List<Mediation> getAllApiSpecificMediationPolicies(String apiId, String organization)
        throws APIManagementException {

    List<Mediation> mappedList = new ArrayList<Mediation>();
    try {
        List<MediationInfo> list = apiPersistenceInstance.getAllMediationPolicies(
                new Organization(organization), apiId);
        if (list != null) {
            // Map persistence-layer MediationInfo objects to API-model Mediation objects.
            for (MediationInfo mediationInfo : list) {
                Mediation mediation = new Mediation();
                mediation.setName(mediationInfo.getName());
                mediation.setUuid(mediationInfo.getId());
                mediation.setType(mediationInfo.getType());
                mappedList.add(mediation);
            }
        }
    } catch (MediationPolicyPersistenceException e) {
        if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) {
            throw new APIMgtResourceNotFoundException(e);
        } else {
            throw new APIManagementException("Error while accessing mediation policies ", e);
        }
    }
    return mappedList;
}

@Override
public Mediation getApiSpecificMediationPolicyByPolicyId(String apiId, String policyId, String organization)
        throws APIManagementException {

    try {
        org.wso2.carbon.apimgt.persistence.dto.Mediation policy = apiPersistenceInstance.getMediationPolicy(
                new Organization(organization), apiId, policyId);
        if (policy != null) {
            Mediation mediation = new Mediation();
            mediation.setName(policy.getName());
            mediation.setUuid(policy.getId());
            mediation.setType(policy.getType());
            mediation.setConfig(policy.getConfig());
            return mediation;
        }
    } catch (MediationPolicyPersistenceException e) {
        if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) {
            throw new APIMgtResourceNotFoundException(e);
        } else {
            throw new APIManagementException("Error while accessing mediation policies ", e);
        }
    }
    // Not found (and not an error): no policy with this id for the API.
    return null;
}

@Override
public Mediation addApiSpecificMediationPolicy(String apiId, Mediation mediationPolicy, String organization)
        throws APIManagementException {

    // Reject overly long policy names before hitting the persistence layer.
    if (StringUtils.isNotBlank(mediationPolicy.getName())
            && mediationPolicy.getName().length() > APIConstants.MAX_LENGTH_MEDIATION_POLICY_NAME) {
        throw new APIManagementException(ExceptionCodes.from(ExceptionCodes.MEDIATION_POLICY_NAME_TOO_LONG,
                APIConstants.MAX_LENGTH_MEDIATION_POLICY_NAME + ""));
    }
    try {
        org.wso2.carbon.apimgt.persistence.dto.Mediation mappedPolicy =
                new org.wso2.carbon.apimgt.persistence.dto.Mediation();
        mappedPolicy.setConfig(mediationPolicy.getConfig());
        mappedPolicy.setName(mediationPolicy.getName());
        mappedPolicy.setType(mediationPolicy.getType());
        org.wso2.carbon.apimgt.persistence.dto.Mediation returnedMappedPolicy = apiPersistenceInstance
                .addMediationPolicy(new Organization(organization), apiId, mappedPolicy);
        if (returnedMappedPolicy != null) {
            mediationPolicy.setUuid(returnedMappedPolicy.getId());
            return mediationPolicy;
        }
    } catch (MediationPolicyPersistenceException e) {
        if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) {
            throw new APIMgtResourceNotFoundException(e);
        } else if (e.getErrorHandler() == ExceptionCodes.MEDIATION_POLICY_API_ALREADY_EXISTS) {
            throw new APIManagementException(ExceptionCodes.MEDIATION_POLICY_API_ALREADY_EXISTS);
        } else {
            throw new APIManagementException("Error while saving mediation policy ", e);
        }
    }
    return null;
}

@Override
public Mediation updateApiSpecificMediationPolicyContent(String apiId, Mediation mediationPolicy,
        String organization) throws APIManagementException {

    try {
        org.wso2.carbon.apimgt.persistence.dto.Mediation mappedPolicy =
                new org.wso2.carbon.apimgt.persistence.dto.Mediation();
        mappedPolicy.setConfig(mediationPolicy.getConfig());
        mappedPolicy.setName(mediationPolicy.getName());
        mappedPolicy.setType(mediationPolicy.getType());
        mappedPolicy.setId(mediationPolicy.getUuid());
        org.wso2.carbon.apimgt.persistence.dto.Mediation returnedMappedPolicy = apiPersistenceInstance
                .updateMediationPolicy(new Organization(organization), apiId, mappedPolicy);
        if (returnedMappedPolicy != null) {
            return mediationPolicy;
        }
    } catch (MediationPolicyPersistenceException e) {
        if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) {
            throw new APIMgtResourceNotFoundException(e);
        } else {
            throw new APIManagementException("Error while saving mediation policy ", e);
        }
    }
    return null;
}

@Override
public void deleteApiSpecificMediationPolicy(String apiId, String mediationPolicyId, String orgId)
        throws APIManagementException {

    try {
        apiPersistenceInstance.deleteMediationPolicy(new Organization(orgId), apiId, mediationPolicyId);
    } catch (MediationPolicyPersistenceException e) {
        if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) {
            throw new APIMgtResourceNotFoundException(e);
        } else {
            throw new APIManagementException("Error while saving mediation policy ", e);
        }
    }
}

protected void checkAccessControlPermission(String userNameWithTenantDomain, String accessControlProperty,
        String publisherAccessControlRoles) throws APIManagementException {
    // String userNameWithTenantDomain = (userNameWithoutChange != null) ?
userNameWithoutChange : username; if (accessControlProperty == null || accessControlProperty.trim().isEmpty() || accessControlProperty.equalsIgnoreCase(APIConstants.NO_ACCESS_CONTROL)) { if (log.isDebugEnabled()) { log.debug("API does not have any access control restriction"); } return; } if (APIUtil.hasPermission(userNameWithTenantDomain, APIConstants.Permissions.APIM_ADMIN)) { return; } if (publisherAccessControlRoles != null && !publisherAccessControlRoles.trim().isEmpty()) { String[] accessControlRoleList = publisherAccessControlRoles.replaceAll("\\s+", "").split(","); if (log.isDebugEnabled()) { log.debug("API has restricted access to creators and publishers with the roles : " + Arrays.toString(accessControlRoleList)); } String[] userRoleList = APIUtil.getListOfRoles(userNameWithTenantDomain); if (log.isDebugEnabled()) { log.debug("User " + username + " has roles " + Arrays.toString(userRoleList)); } for (String role : accessControlRoleList) { if (!role.equalsIgnoreCase(APIConstants.NULL_USER_ROLE_LIST) && APIUtil.compareRoleList(userRoleList, role)) { return; } } throw new APIManagementException(APIConstants.UN_AUTHORIZED_ERROR_MESSAGE + " view or modify the api"); } } @Override public void saveGraphqlSchemaDefinition(String apiId, String definition, String organization) throws APIManagementException { try { apiPersistenceInstance.saveGraphQLSchemaDefinition(new Organization(organization), apiId, definition); } catch (GraphQLPersistenceException e) { if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) { throw new APIMgtResourceNotFoundException(e); } else { throw new APIManagementException("Error while saving graphql definition ", e); } } } /** * Returns APIProduct Search result based on the provided query. 
*
     * @param registry    registry instance (not referenced by this implementation)
     * @param searchQuery Ex: provider=*admin*
     * @param start       pagination start index, passed through to the persistence search
     * @param end         pagination end index, passed through to the persistence search
     * @return APIProduct result
     * @throws APIManagementException
     */
    public Map<String, Object> searchPaginatedAPIProducts(Registry registry, String searchQuery, int start, int end)
            throws APIManagementException {
        SortedSet<APIProduct> productSet = new TreeSet<APIProduct>(new APIProductNameComparator());
        List<APIProduct> productList = new ArrayList<APIProduct>();
        Map<String, Object> result = new HashMap<String, Object>();
        if (log.isDebugEnabled()) {
            log.debug("Original search query received : " + searchQuery);
        }
        // Build the user context (roles + user properties) used for visibility filtering.
        Organization org = new Organization(tenantDomain);
        String[] roles = APIUtil.getFilteredUserRoles(userNameWithoutChange);
        Map<String, Object> properties = APIUtil.getUserProperties(userNameWithoutChange);
        UserContext userCtx = new UserContext(userNameWithoutChange, org, properties, roles);
        try {
            PublisherAPIProductSearchResult searchAPIs =
                    apiPersistenceInstance.searchAPIProductsForPublisher(org, searchQuery, start, end, userCtx);
            if (log.isDebugEnabled()) {
                log.debug("searched API products for query : " + searchQuery + " :-->: " + searchAPIs.toString());
            }
            if (searchAPIs != null) {
                List<PublisherAPIProductInfo> list = searchAPIs.getPublisherAPIProductInfoList();
                // NOTE(review): apiList is never populated or read — candidate for removal.
                List<Object> apiList = new ArrayList<>();
                // Map persistence-layer info objects onto lightweight APIProduct instances.
                for (PublisherAPIProductInfo publisherAPIInfo : list) {
                    APIProduct mappedAPI = new APIProduct(new APIProductIdentifier(publisherAPIInfo.getProviderName(),
                            publisherAPIInfo.getApiProductName(), publisherAPIInfo.getVersion()));
                    mappedAPI.setUuid(publisherAPIInfo.getId());
                    mappedAPI.setState(publisherAPIInfo.getState());
                    mappedAPI.setContext(publisherAPIInfo.getContext());
                    mappedAPI.setApiSecurity(publisherAPIInfo.getApiSecurity());
                    productList.add(mappedAPI);
                }
                productSet.addAll(productList);
                result.put("products", productSet);
                result.put("length", searchAPIs.getTotalAPIsCount());
                // NOTE(review): "isMore" is hard-coded true whenever a result object exists —
                // confirm callers do not rely on it reflecting actual remaining pages.
                result.put("isMore", true);
            } else {
                result.put("products", productSet);
                result.put("length", 0);
                result.put("isMore", false);
            }
        } catch (APIPersistenceException e) {
            throw new APIManagementException("Error while searching the api ", e);
        }
        return result;
    }

    /**
     * Adds a new APIRevision to an existing API
     *
     * @param apiRevision APIRevision
     * @throws APIManagementException if failed to add APIRevision
     */
    @Override
    public String addAPIRevision(APIRevision apiRevision, String organization) throws APIManagementException {
        // NOTE(review): hard-coded cap of 5 revisions per API (count > 4).
        int revisionCountPerAPI = apiMgtDAO.getRevisionCountByAPI(apiRevision.getApiUUID());
        if (revisionCountPerAPI > 4) {
            String errorMessage = "Maximum number of revisions per API has reached. " +
                    "Need to remove stale revision to create a new Revision for API with API UUID:"
                    + apiRevision.getApiUUID();
            throw new APIManagementException(errorMessage,
                    ExceptionCodes.from(ExceptionCodes.MAXIMUM_REVISIONS_REACHED, apiRevision.getApiUUID()));
        }
        // Revision ids are monotonically increasing per API.
        int revisionId = apiMgtDAO.getMostRecentRevisionId(apiRevision.getApiUUID()) + 1;
        apiRevision.setId(revisionId);
        APIIdentifier apiId = APIUtil.getAPIIdentifierFromUUID(apiRevision.getApiUUID());
        if (apiId == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
                    + apiRevision.getApiUUID(),
                    ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiRevision.getApiUUID()));
        }
        apiId.setUuid(apiRevision.getApiUUID());
        String revisionUUID;
        try {
            // Registry/persistence artifacts are created first; the DB row follows below.
            revisionUUID = apiPersistenceInstance.addAPIRevision(new Organization(organization), apiId.getUUID(),
                    revisionId);
        } catch (APIPersistenceException e) {
            String errorMessage = "Failed to add revision registry artifacts";
            throw new APIManagementException(errorMessage, ExceptionCodes.from(ExceptionCodes.
ERROR_CREATING_API_REVISION, apiRevision.getApiUUID()));
        }
        if (StringUtils.isEmpty(revisionUUID)) {
            String errorMessage = "Failed to retrieve revision uuid";
            throw new APIManagementException(errorMessage,
                    ExceptionCodes.from(ExceptionCodes.API_REVISION_UUID_NOT_FOUND));
        }
        apiRevision.setRevisionUUID(revisionUUID);
        apiMgtDAO.addAPIRevision(apiRevision);
        if (importExportAPI != null) {
            try {
                // Export the revision as a JSON artifact for gateway distribution.
                File artifact = importExportAPI
                        .exportAPI(apiRevision.getApiUUID(), revisionUUID, true, ExportFormat.JSON, false, true,
                                organization);
                // Keeping the organization as tenant domain since MG does not support organization-wise deployment
                // Artifacts will be deployed in ST for all organizations
                gatewayArtifactsMgtDAO.addGatewayAPIArtifactAndMetaData(apiRevision.getApiUUID(), apiId.getApiName(),
                        apiId.getVersion(), apiRevision.getRevisionUUID(), tenantDomain, APIConstants.HTTP_PROTOCOL,
                        artifact);
                if (artifactSaver != null) {
                    // Keeping the organization as tenant domain since MG does not support organization-wise deployment
                    // Artifacts will be deployed in ST for all organizations
                    artifactSaver.saveArtifact(apiRevision.getApiUUID(), apiId.getApiName(), apiId.getVersion(),
                            apiRevision.getRevisionUUID(), tenantDomain, artifact);
                }
            } catch (APIImportExportException | ArtifactSynchronizerException e) {
                // NOTE(review): API_REVISION_UUID_NOT_FOUND looks like a mismatched error code for an
                // artifact-store failure — confirm against the ExceptionCodes catalogue.
                throw new APIManagementException("Error while Store the Revision Artifact",
                        ExceptionCodes.from(ExceptionCodes.API_REVISION_UUID_NOT_FOUND));
            }
        }
        return revisionUUID;
    }

    /**
     * Get a Revision related to provided and revision UUID
     *
     * @param revisionUUID API Revision UUID
     * @return API Revision
     * @throws APIManagementException if failed to get the related API revision
     */
    @Override
    public APIRevision getAPIRevision(String revisionUUID) throws APIManagementException {
        return apiMgtDAO.getRevisionByRevisionUUID(revisionUUID);
    }

    /**
     * Get the revision UUID from the Revision no and API UUID
     *
     * @param revisionNum revision number
     * @param apiUUID     UUID of the API
     * @return UUID of the revision
     * @throws APIManagementException if failed to get the API revision uuid
     */
    @Override
    public String getAPIRevisionUUID(String revisionNum, String apiUUID) throws APIManagementException {
        return apiMgtDAO.getRevisionUUID(revisionNum, apiUUID);
    }

    /**
     * Get the earliest revision UUID from the revision list for a given API
     *
     * @param apiUUID API UUID
     * @return Earliest revision's UUID
     * @throws APIManagementException if failed to get the revision
     */
    @Override
    public String getEarliestRevisionUUID(String apiUUID) throws APIManagementException {
        return apiMgtDAO.getEarliestRevision(apiUUID);
    }

    /**
     * Get the latest revision UUID from the revision list for a given API
     *
     * @param apiUUID API UUID
     * @return Latest revision's UUID
     * @throws APIManagementException if failed to get the revision
     */
    @Override
    public String getLatestRevisionUUID(String apiUUID) throws APIManagementException {
        return apiMgtDAO.getLatestRevisionUUID(apiUUID);
    }

    /**
     * Get a List of API Revisions related to provided API UUID
     *
     * @param apiUUID API UUID
     * @return API Revision List
     * @throws APIManagementException if failed to get the related API revision
     */
    @Override
    public List<APIRevision> getAPIRevisions(String apiUUID) throws APIManagementException {
        return apiMgtDAO.getRevisionsListByAPIUUID(apiUUID);
    }

    /**
     * Adds a new APIRevisionDeployment to an existing API
     *
     * @param apiId                  API UUID
     * @param apiRevisionId          API Revision UUID
     * @param apiRevisionDeployments List of APIRevisionDeployment objects
     * @param organization           identifier of the organization
     * @throws APIManagementException if failed to add APIRevision
     */
    @Override
    public void deployAPIRevision(String apiId, String apiRevisionId,
                                  List<APIRevisionDeployment> apiRevisionDeployments, String organization)
            throws APIManagementException {
        APIIdentifier apiIdentifier = APIUtil.getAPIIdentifierFromUUID(apiId);
        if (apiIdentifier == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: " + apiId,
ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiId));
        }
        APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
        if (apiRevision == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                    + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
        }
        List<APIRevisionDeployment> currentApiRevisionDeploymentList =
                apiMgtDAO.getAPIRevisionDeploymentsByApiUUID(apiId);
        APIGatewayManager gatewayManager = APIGatewayManager.getInstance();
        // Stamp the lightweight API object with revision/organization details before gateway operations.
        API api = getLightweightAPIByUUID(apiId, organization);
        api.setRevisionedApiId(apiRevision.getRevisionUUID());
        api.setRevisionId(apiRevision.getId());
        api.setUuid(apiId);
        api.getId().setUuid(apiId);
        api.setOrganization(organization);
        Set<String> environmentsToAdd = new HashSet<>();
        Map<String, String> gatewayVhosts = new HashMap<>();
        Set<APIRevisionDeployment> environmentsToRemove = new HashSet<>();
        // An environment already hosting some revision of this API gets its old deployment
        // marked for removal, so this deployment effectively replaces it.
        for (APIRevisionDeployment apiRevisionDeployment : apiRevisionDeployments) {
            for (APIRevisionDeployment currentapiRevisionDeployment : currentApiRevisionDeploymentList) {
                if (StringUtils.equalsIgnoreCase(currentapiRevisionDeployment.getDeployment(),
                        apiRevisionDeployment.getDeployment())) {
                    environmentsToRemove.add(currentapiRevisionDeployment);
                }
            }
            environmentsToAdd.add(apiRevisionDeployment.getDeployment());
            gatewayVhosts.put(apiRevisionDeployment.getDeployment(), apiRevisionDeployment.getVhost());
        }
        if (environmentsToRemove.size() > 0) {
            apiMgtDAO.removeAPIRevisionDeployment(apiId, environmentsToRemove);
            removeFromGateway(api, environmentsToRemove, environmentsToAdd);
        }
        GatewayArtifactsMgtDAO.getInstance()
                .addAndRemovePublishedGatewayLabels(apiId, apiRevisionId, environmentsToAdd, gatewayVhosts,
                        environmentsToRemove);
        apiMgtDAO.addAPIRevisionDeployment(apiRevisionId, apiRevisionDeployments);
        if (environmentsToAdd.size() > 0) {
            // TODO remove this to organization once the microgateway can build gateway based on organization.
            gatewayManager.deployToGateway(api, tenantDomain, environmentsToAdd);
        }
        // Default-version bookkeeping: if the deployed version is the default, mark it published
        // and notify the previously published default version of the change.
        String publishedDefaultVersion = getPublishedDefaultVersion(apiIdentifier);
        String defaultVersion = getDefaultVersion(apiIdentifier);
        apiMgtDAO.updateDefaultAPIPublishedVersion(apiIdentifier);
        if (publishedDefaultVersion != null) {
            if (apiIdentifier.getVersion().equals(defaultVersion)) {
                api.setAsPublishedDefaultVersion(true);
            }
            if (api.isPublishedDefaultVersion() && !apiIdentifier.getVersion().equals(publishedDefaultVersion)) {
                APIIdentifier previousDefaultVersionIdentifier = new APIIdentifier(api.getId().getProviderName(),
                        api.getId().getApiName(), publishedDefaultVersion);
                sendUpdateEventToPreviousDefaultVersion(previousDefaultVersionIdentifier, organization);
            }
        }
    }

    /**
     * Adds a new APIRevisionDeployment to an existing API
     *
     * @param apiId                   API UUID
     * @param apiRevisionUUID         API Revision UUID
     * @param deployedAPIRevisionList List of APIRevisionDeployment objects
     * @throws APIManagementException if failed to add APIRevision
     */
    @Override
    public void addDeployedAPIRevision(String apiId, String apiRevisionUUID,
                                       List<DeployedAPIRevision> deployedAPIRevisionList)
            throws APIManagementException {
        List<DeployedAPIRevision> currentDeployedApiRevisionList =
                apiMgtDAO.getDeployedAPIRevisionByApiUUID(apiId);
        Set<DeployedAPIRevision> environmentsToRemove = new HashSet<>();
        // Deployments to add
        List<DeployedAPIRevision> environmentsToAdd = new ArrayList<>();
        List<String> envNames = new ArrayList<>();
        for (DeployedAPIRevision deployedAPIRevision : deployedAPIRevisionList) {
            // Remove duplicate entries for same revision uuid and env from incoming list
            if (!envNames.contains(deployedAPIRevision.getDeployment())) {
                envNames.add(deployedAPIRevision.getDeployment());
                environmentsToAdd.add(deployedAPIRevision);
                // Remove old deployed-revision entries of same env and apiid from existing db records
                for (DeployedAPIRevision currentapiRevisionDeployment : currentDeployedApiRevisionList) {
                    if
(StringUtils.equalsIgnoreCase(currentapiRevisionDeployment.getDeployment(),
                            deployedAPIRevision.getDeployment())) {
                        environmentsToRemove.add(currentapiRevisionDeployment);
                    }
                }
            }
        }
        // Discard old deployment info
        if (environmentsToRemove.size() > 0) {
            apiMgtDAO.removeDeployedAPIRevision(apiId, environmentsToRemove);
        }
        // Add new deployed revision update to db
        // NOTE(review): the guard checks the incoming list but inserts environmentsToAdd —
        // equivalent here (environmentsToAdd is non-empty whenever the list is), though
        // checking environmentsToAdd directly would read more clearly.
        if (deployedAPIRevisionList.size() > 0) {
            apiMgtDAO.addDeployedAPIRevision(apiRevisionUUID, environmentsToAdd);
        }
    }

    /**
     * Updates the dev-portal display settings of an existing deployment of the given revision.
     *
     * @param apiId                 API UUID
     * @param apiRevisionId         API Revision UUID
     * @param apiRevisionDeployment deployment entry to update
     * @throws APIManagementException if the API, revision, or deployment cannot be found
     */
    @Override
    public void updateAPIDisplayOnDevportal(String apiId, String apiRevisionId,
                                            APIRevisionDeployment apiRevisionDeployment)
            throws APIManagementException {
        APIIdentifier apiIdentifier = APIUtil.getAPIIdentifierFromUUID(apiId);
        if (apiIdentifier == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: " + apiId,
                    ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiId));
        }
        APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
        if (apiRevision == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                    + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
        }
        List<APIRevisionDeployment> currentApiRevisionDeploymentList =
                apiMgtDAO.getAPIRevisionDeploymentsByApiUUID(apiId);
        Set<APIRevisionDeployment> environmentsToUpdate = new HashSet<>();
        for (APIRevisionDeployment currentapiRevisionDeployment : currentApiRevisionDeploymentList) {
            if (StringUtils.equalsIgnoreCase(currentapiRevisionDeployment.getDeployment(),
                    apiRevisionDeployment.getDeployment())) {
                environmentsToUpdate.add(apiRevisionDeployment);
            }
        }
        // if the provided deployment doesn't exist we are not adding to update list
        if (environmentsToUpdate.size() > 0) {
            apiMgtDAO.updateAPIRevisionDeployment(apiId, environmentsToUpdate);
        } else {
            throw new APIMgtResourceNotFoundException("deployment with " + apiRevisionDeployment.getDeployment()
                    + " not found", ExceptionCodes.from(ExceptionCodes.EXISTING_DEPLOYMENT_NOT_FOUND,
                    apiRevisionDeployment.getDeployment()));
        }
    }

    /**
     * Loads the API behind {@code apiRevision.getApiUUID()}, stamps it with the revision's
     * ids, and re-labels it with the caller-facing {@code apiId}.
     */
    private API getAPIbyUUID(String apiId, APIRevision apiRevision, String organization)
            throws APIManagementException {
        API api = getAPIbyUUID(apiRevision.getApiUUID(), organization);
        api.setRevisionedApiId(apiRevision.getRevisionUUID());
        api.setRevisionId(apiRevision.getId());
        api.setUuid(apiId);
        api.getId().setUuid(apiId);
        return api;
    }

    /** Looks up a single revision deployment by deployment name and revision id. */
    @Override
    public APIRevisionDeployment getAPIRevisionDeployment(String name, String revisionId)
            throws APIManagementException {
        return apiMgtDAO.getAPIRevisionDeploymentByNameAndRevsionID(name, revisionId);
    }

    /** Lists every deployment recorded for the given revision UUID. */
    @Override
    public List<APIRevisionDeployment> getAPIRevisionDeploymentList(String revisionUUID)
            throws APIManagementException {
        return apiMgtDAO.getAPIRevisionDeploymentByRevisionUUID(revisionUUID);
    }

    /**
     * Remove a new APIRevisionDeployment to an existing API
     *
     * @param apiId                  API UUID
     * @param apiRevisionId          API Revision UUID
     * @param apiRevisionDeployments List of APIRevisionDeployment objects
     * @param organization           identifier of the organization
     * @throws APIManagementException if failed to add APIRevision
     */
    @Override
    public void undeployAPIRevisionDeployment(String apiId, String apiRevisionId,
                                              List<APIRevisionDeployment> apiRevisionDeployments, String organization)
            throws APIManagementException {
        APIIdentifier apiIdentifier = APIUtil.getAPIIdentifierFromUUID(apiId);
        if (apiIdentifier == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: " + apiId,
                    ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiId));
        }
        APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
        if (apiRevision == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                    + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
        }
        API api = getAPIbyUUID(apiId, apiRevision, organization);
        removeFromGateway(api, new
HashSet<>(apiRevisionDeployments), Collections.emptySet());
        apiMgtDAO.removeAPIRevisionDeployment(apiRevisionId, apiRevisionDeployments);
        GatewayArtifactsMgtDAO.getInstance().removePublishedGatewayLabels(apiId, apiRevisionId);
    }

    /**
     * Restore a provided API Revision as the current API of the API
     *
     * @param apiId         API UUID
     * @param apiRevisionId API Revision UUID
     * @throws APIManagementException if failed to restore APIRevision
     */
    @Override
    public void restoreAPIRevision(String apiId, String apiRevisionId, String organization)
            throws APIManagementException {
        APIIdentifier apiIdentifier = APIUtil.getAPIIdentifierFromUUID(apiId);
        if (apiIdentifier == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: " + apiId,
                    ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiId));
        }
        APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
        if (apiRevision == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                    + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
        }
        apiIdentifier.setUuid(apiId);
        try {
            // Restore the registry artifacts first; the DB row is only restored if that succeeds.
            apiPersistenceInstance.restoreAPIRevision(new Organization(organization), apiIdentifier.getUUID(),
                    apiRevision.getRevisionUUID(), apiRevision.getId());
        } catch (APIPersistenceException e) {
            String errorMessage = "Failed to restore registry artifacts";
            throw new APIManagementException(errorMessage, ExceptionCodes.from(ExceptionCodes.
                    ERROR_RESTORING_API_REVISION, apiRevision.getApiUUID()));
        }
        apiMgtDAO.restoreAPIRevision(apiRevision);
    }

    /**
     * Delete an API Revision
     *
     * @param apiId         API UUID
     * @param apiRevisionId API Revision UUID
     * @param organization  identifier of the organization
     * @throws APIManagementException if failed to delete APIRevision
     */
    @Override
    public void deleteAPIRevision(String apiId, String apiRevisionId, String organization)
            throws APIManagementException {
        APIIdentifier apiIdentifier = APIUtil.getAPIIdentifierFromUUID(apiId);
        if (apiIdentifier == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: " + apiId,
                    ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiId));
        }
        APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
        if (apiRevision == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                    + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
        }
        // A revision still deployed to any gateway must be undeployed before it can be deleted.
        List<APIRevisionDeployment> apiRevisionDeploymentsResponse = getAPIRevisionDeploymentList(apiRevisionId);
        if (apiRevisionDeploymentsResponse.size() != 0) {
            String errorMessage = "Couldn't delete API revision since API revision is currently deployed to a gateway"
                    + "." +
                    "You need to undeploy the API Revision from the gateway before attempting deleting API Revision: "
                    + apiRevision.getRevisionUUID();
            throw new APIManagementException(errorMessage, ExceptionCodes.from(ExceptionCodes.
                    EXISTING_API_REVISION_DEPLOYMENT_FOUND, apiRevisionId));
        }
        apiIdentifier.setUuid(apiId);
        try {
            apiPersistenceInstance.deleteAPIRevision(new Organization(organization), apiIdentifier.getUUID(),
                    apiRevision.getRevisionUUID(), apiRevision.getId());
        } catch (APIPersistenceException e) {
            String errorMessage = "Failed to delete registry artifacts";
            throw new APIManagementException(errorMessage, ExceptionCodes.from(ExceptionCodes.
ERROR_DELETING_API_REVISION, apiRevision.getApiUUID()));
        }
        apiMgtDAO.deleteAPIRevision(apiRevision);
        gatewayArtifactsMgtDAO.deleteGatewayArtifact(apiRevision.getApiUUID(), apiRevision.getRevisionUUID());
        if (artifactSaver != null) {
            try {
                artifactSaver.removeArtifact(apiRevision.getApiUUID(), apiIdentifier.getApiName(),
                        apiIdentifier.getVersion(), apiRevision.getRevisionUUID(), organization);
            } catch (ArtifactSynchronizerException e) {
                // Best effort: failure to clean the artifact store does not abort the deletion.
                log.error("Error while deleting Runtime artifacts from artifact Store", e);
            }
        }
    }

    /**
     * Creates a new revision of an API Product, exports its artifacts, and stores them for
     * gateway consumption.
     *
     * @param apiRevision  revision descriptor carrying the product UUID
     * @param organization identifier of the organization
     * @return UUID of the newly created revision
     * @throws APIManagementException when the revision cap is hit, the product is missing,
     *                                or artifact persistence fails
     */
    @Override
    public String addAPIProductRevision(APIRevision apiRevision, String organization) throws APIManagementException {
        // NOTE(review): hard-coded cap of 5 revisions per product (count > 4), mirroring addAPIRevision.
        int revisionCountPerAPI = apiMgtDAO.getRevisionCountByAPI(apiRevision.getApiUUID());
        if (revisionCountPerAPI > 4) {
            String errorMessage = "Maximum number of revisions per API Product has reached. " +
                    "Need to remove stale revision to create a new Revision for API Product with id:"
                    + apiRevision.getApiUUID();
            throw new APIManagementException(errorMessage,
                    ExceptionCodes.from(ExceptionCodes.MAXIMUM_REVISIONS_REACHED, apiRevision.getApiUUID()));
        }
        int revisionId = apiMgtDAO.getMostRecentRevisionId(apiRevision.getApiUUID()) + 1;
        apiRevision.setId(revisionId);
        APIProductIdentifier apiProductIdentifier = APIUtil.getAPIProductIdentifierFromUUID(apiRevision.getApiUUID());
        if (apiProductIdentifier == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Product with ID: "
                    + apiRevision.getApiUUID(),
                    ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiRevision.getApiUUID()));
        }
        apiProductIdentifier.setUUID(apiRevision.getApiUUID());
        String revisionUUID;
        try {
            revisionUUID = apiPersistenceInstance.addAPIRevision(new Organization(tenantDomain),
                    apiProductIdentifier.getUUID(), revisionId);
        } catch (APIPersistenceException e) {
            String errorMessage = "Failed to add revision registry artifacts";
            throw new APIManagementException(errorMessage, ExceptionCodes.from(ExceptionCodes.
                    ERROR_CREATING_API_REVISION, apiRevision.getApiUUID()));
        }
        if (StringUtils.isEmpty(revisionUUID)) {
            String errorMessage = "Failed to retrieve revision uuid";
            throw new APIManagementException(errorMessage,
                    ExceptionCodes.from(ExceptionCodes.API_REVISION_UUID_NOT_FOUND));
        }
        apiRevision.setRevisionUUID(revisionUUID);
        apiMgtDAO.addAPIProductRevision(apiRevision);
        try {
            // Export the product revision as a JSON artifact for gateway distribution.
            File artifact = importExportAPI
                    .exportAPIProduct(apiRevision.getApiUUID(), revisionUUID, true, ExportFormat.JSON, false, true,
                            organization);
            gatewayArtifactsMgtDAO
                    .addGatewayAPIArtifactAndMetaData(apiRevision.getApiUUID(), apiProductIdentifier.getName(),
                            apiProductIdentifier.getVersion(), apiRevision.getRevisionUUID(), tenantDomain,
                            APIConstants.API_PRODUCT, artifact);
            if (artifactSaver != null) {
                artifactSaver.saveArtifact(apiRevision.getApiUUID(), apiProductIdentifier.getName(),
                        apiProductIdentifier.getVersion(), apiRevision.getRevisionUUID(), tenantDomain, artifact);
            }
        } catch (APIImportExportException | ArtifactSynchronizerException e) {
            // NOTE(review): API_REVISION_UUID_NOT_FOUND looks like a mismatched error code for an
            // artifact-store failure — confirm against the ExceptionCodes catalogue.
            throw new APIManagementException("Error while Store the Revision Artifact",
                    ExceptionCodes.from(ExceptionCodes.API_REVISION_UUID_NOT_FOUND));
        }
        return revisionUUID;
    }

    /**
     * Deploys a given API Product revision to the requested gateway environments, replacing
     * any previous deployment of the same environments.
     *
     * @param apiProductId           API Product UUID
     * @param apiRevisionId          revision UUID to deploy
     * @param apiRevisionDeployments target environments (with vhosts)
     * @throws APIManagementException if the product or revision cannot be found
     */
    @Override
    public void deployAPIProductRevision(String apiProductId, String apiRevisionId,
                                         List<APIRevisionDeployment> apiRevisionDeployments)
            throws APIManagementException {
        APIProductIdentifier apiProductIdentifier = APIUtil.getAPIProductIdentifierFromUUID(apiProductId);
        if (apiProductIdentifier == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Product with ID: "
                    + apiProductId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiProductId));
        }
        APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
        if (apiRevision == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                    + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
        }
        APIProduct product =
getAPIProductbyUUID(apiRevisionId, tenantDomain);
        product.setUuid(apiProductId);
        List<APIRevisionDeployment> currentApiRevisionDeploymentList =
                apiMgtDAO.getAPIRevisionDeploymentsByApiUUID(apiProductId);
        APIGatewayManager gatewayManager = APIGatewayManager.getInstance();
        Set<String> environmentsToAdd = new HashSet<>();
        Map<String, String> gatewayVhosts = new HashMap<>();
        Set<APIRevisionDeployment> environmentsToRemove = new HashSet<>();
        // Environments already hosting some revision of this product get their old deployment
        // marked for removal, so this deployment effectively replaces it.
        for (APIRevisionDeployment apiRevisionDeployment : apiRevisionDeployments) {
            for (APIRevisionDeployment currentapiRevisionDeployment : currentApiRevisionDeploymentList) {
                if (StringUtils.equalsIgnoreCase(currentapiRevisionDeployment.getDeployment(),
                        apiRevisionDeployment.getDeployment())) {
                    environmentsToRemove.add(currentapiRevisionDeployment);
                }
            }
            environmentsToAdd.add(apiRevisionDeployment.getDeployment());
            gatewayVhosts.put(apiRevisionDeployment.getDeployment(), apiRevisionDeployment.getVhost());
        }
        if (environmentsToRemove.size() > 0) {
            apiMgtDAO.removeAPIRevisionDeployment(apiProductId, environmentsToRemove);
            removeFromGateway(product, tenantDomain, environmentsToRemove, environmentsToAdd);
        }
        GatewayArtifactsMgtDAO.getInstance()
                .addAndRemovePublishedGatewayLabels(apiProductId, apiRevisionId, environmentsToAdd, gatewayVhosts,
                        environmentsToRemove);
        apiMgtDAO.addAPIRevisionDeployment(apiRevisionId, apiRevisionDeployments);
        if (environmentsToAdd.size() > 0) {
            gatewayManager.deployToGateway(product, tenantDomain, environmentsToAdd);
        }
    }

    /**
     * Updates the dev-portal display settings of an existing deployment of the given
     * API Product revision.
     *
     * @param apiProductId          API Product UUID
     * @param apiRevisionId         API Revision UUID
     * @param apiRevisionDeployment deployment entry to update
     * @throws APIManagementException if the product, revision, or deployment cannot be found
     */
    @Override
    public void updateAPIProductDisplayOnDevportal(String apiProductId, String apiRevisionId,
                                                   APIRevisionDeployment apiRevisionDeployment)
            throws APIManagementException {
        APIProductIdentifier apiProductIdentifier = APIUtil.getAPIProductIdentifierFromUUID(apiProductId);
        if (apiProductIdentifier == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Product with ID: "
                    + apiProductId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiProductId));
        }
        APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
        if (apiRevision == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                    + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
        }
        List<APIRevisionDeployment> currentApiRevisionDeploymentList =
                apiMgtDAO.getAPIRevisionDeploymentsByApiUUID(apiProductId);
        Set<APIRevisionDeployment> environmentsToUpdate = new HashSet<>();
        for (APIRevisionDeployment currentapiRevisionDeployment : currentApiRevisionDeploymentList) {
            if (StringUtils.equalsIgnoreCase(currentapiRevisionDeployment.getDeployment(),
                    apiRevisionDeployment.getDeployment())) {
                environmentsToUpdate.add(apiRevisionDeployment);
            }
        }
        // if the provided deployment doesn't exist we are not adding to update list
        if (environmentsToUpdate.size() > 0) {
            apiMgtDAO.updateAPIRevisionDeployment(apiProductId, environmentsToUpdate);
        } else {
            throw new APIMgtResourceNotFoundException("deployment with " + apiRevisionDeployment.getDeployment()
                    + " not found", ExceptionCodes.from(ExceptionCodes.EXISTING_DEPLOYMENT_NOT_FOUND,
                    apiRevisionDeployment.getDeployment()));
        }
    }

    /**
     * Removes the given deployments of an API Product revision from the gateway and from the
     * deployment records.
     *
     * @param apiProductId           API Product UUID
     * @param apiRevisionId          API Revision UUID
     * @param apiRevisionDeployments deployments to remove
     * @throws APIManagementException if the product or revision cannot be found
     */
    @Override
    public void undeployAPIProductRevisionDeployment(String apiProductId, String apiRevisionId,
                                                     List<APIRevisionDeployment> apiRevisionDeployments)
            throws APIManagementException {
        APIProductIdentifier apiProductIdentifier = APIUtil.getAPIProductIdentifierFromUUID(apiProductId);
        if (apiProductIdentifier == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Product with ID: "
                    + apiProductId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiProductId));
        }
        APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
        if (apiRevision == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                    + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
        }
        APIProduct
product = getAPIProductbyUUID(apiRevisionId, tenantDomain);
        product.setUuid(apiProductId);
        Set<String> environmentsToRemove = new HashSet<>();
        for (APIRevisionDeployment apiRevisionDeployment : apiRevisionDeployments) {
            environmentsToRemove.add(apiRevisionDeployment.getDeployment());
        }
        product.setEnvironments(environmentsToRemove);
        removeFromGateway(product, tenantDomain, new HashSet<>(apiRevisionDeployments), Collections.emptySet());
        apiMgtDAO.removeAPIRevisionDeployment(apiRevisionId, apiRevisionDeployments);
        GatewayArtifactsMgtDAO.getInstance().removePublishedGatewayLabels(apiProductId, apiRevisionId);
    }

    /**
     * Restores a provided API Product revision as the current state of the product.
     *
     * @param apiProductId  API Product UUID
     * @param apiRevisionId API Revision UUID
     * @param organization  identifier of the organization
     * @throws APIManagementException if the product/revision is missing or the registry restore fails
     */
    @Override
    public void restoreAPIProductRevision(String apiProductId, String apiRevisionId, String organization)
            throws APIManagementException {
        APIProductIdentifier apiProductIdentifier = APIUtil.getAPIProductIdentifierFromUUID(apiProductId);
        if (apiProductIdentifier == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Product with ID: "
                    + apiProductId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiProductId));
        }
        APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
        if (apiRevision == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                    + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
        }
        apiProductIdentifier.setUUID(apiProductId);
        try {
            // Registry restore first; the DB record is only restored after the artifacts succeed.
            apiPersistenceInstance.restoreAPIRevision(new Organization(organization), apiProductIdentifier.getUUID(),
                    apiRevision.getRevisionUUID(), apiRevision.getId());
        } catch (APIPersistenceException e) {
            String errorMessage = "Failed to restore registry artifacts";
            throw new APIManagementException(errorMessage, ExceptionCodes.from(ExceptionCodes.
                    ERROR_RESTORING_API_REVISION, apiRevision.getApiUUID()));
        }
        apiMgtDAO.restoreAPIProductRevision(apiRevision);
    }

    /**
     * Deletes an API Product revision, refusing when the revision is still deployed to any gateway.
     *
     * @param apiProductId  API Product UUID
     * @param apiRevisionId API Revision UUID
     * @param organization  identifier of the organization
     * @throws APIManagementException if the product/revision is missing, still deployed, or deletion fails
     */
    @Override
    public void deleteAPIProductRevision(String apiProductId, String apiRevisionId, String organization)
            throws APIManagementException {
        APIProductIdentifier apiProductIdentifier = APIUtil.getAPIProductIdentifierFromUUID(apiProductId);
        if (apiProductIdentifier == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Product with ID: "
                    + apiProductId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiProductId));
        }
        APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
        if (apiRevision == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                    + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
        }
        // A revision still deployed to any gateway must be undeployed before it can be deleted.
        List<APIRevisionDeployment> apiRevisionDeploymentsResponse = getAPIRevisionDeploymentList(apiRevisionId);
        if (apiRevisionDeploymentsResponse.size() != 0) {
            String errorMessage = "Couldn't delete API revision since API revision is currently deployed to a gateway." +
                    "You need to undeploy the API Revision from the gateway before attempting deleting API Revision: "
                    + apiRevision.getRevisionUUID();
            throw new APIManagementException(errorMessage, ExceptionCodes.from(ExceptionCodes.
                    EXISTING_API_REVISION_DEPLOYMENT_FOUND, apiRevisionId));
        }
        apiProductIdentifier.setUUID(apiProductId);
        try {
            apiPersistenceInstance.deleteAPIRevision(new Organization(organization), apiProductIdentifier.getUUID(),
                    apiRevision.getRevisionUUID(), apiRevision.getId());
        } catch (APIPersistenceException e) {
            String errorMessage = "Failed to delete registry artifacts";
            throw new APIManagementException(errorMessage, ExceptionCodes.from(ExceptionCodes.
ERROR_DELETING_API_REVISION, apiRevision.getApiUUID()));
        }
        apiMgtDAO.deleteAPIProductRevision(apiRevision);
        gatewayArtifactsMgtDAO.deleteGatewayArtifact(apiRevision.getApiUUID(), apiRevision.getRevisionUUID());
        if (artifactSaver != null) {
            try {
                artifactSaver.removeArtifact(apiRevision.getApiUUID(), apiProductIdentifier.getName(),
                        apiProductIdentifier.getVersion(), apiRevision.getRevisionUUID(), tenantDomain);
            } catch (ArtifactSynchronizerException e) {
                // Best effort: artifact-store cleanup failures are logged, not rethrown.
                log.error("Error while deleting Runtime artifacts from artifact Store", e);
            }
        }
    }

    /**
     * Generates an internal API key (token) scoped to the given API for the current user,
     * with the production key type.
     *
     * @param apiId UUID of the API the key is scoped to
     * @return generated token string
     * @throws APIManagementException if the API does not exist or token generation fails
     */
    @Override
    public String generateApiKey(String apiId) throws APIManagementException {
        APIInfo apiInfo = apiMgtDAO.getAPIInfoByUUID(apiId);
        if (apiInfo == null) {
            throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with ID: " + apiId,
                    ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiId));
        }
        SubscribedApiDTO subscribedApiInfo = new SubscribedApiDTO();
        subscribedApiInfo.setName(apiInfo.getName());
        subscribedApiInfo.setContext(apiInfo.getContext());
        subscribedApiInfo.setPublisher(apiInfo.getProvider());
        subscribedApiInfo.setVersion(apiInfo.getVersion());
        JwtTokenInfoDTO jwtTokenInfoDTO = new JwtTokenInfoDTO();
        jwtTokenInfoDTO.setEndUserName(username);
        jwtTokenInfoDTO.setKeyType(APIConstants.API_KEY_TYPE_PRODUCTION);
        jwtTokenInfoDTO.setSubscribedApiDTOList(Arrays.asList(subscribedApiInfo));
        // NOTE(review): expiration is hard-coded as 60 * 1000; whether the unit is seconds or
        // milliseconds depends on JwtTokenInfoDTO's contract — confirm before changing.
        jwtTokenInfoDTO.setExpirationTime(60 * 1000);
        ApiKeyGenerator apiKeyGenerator = new InternalAPIKeyGenerator();
        return apiKeyGenerator.generateToken(jwtTokenInfoDTO);
    }

    /** Lists every revision deployment recorded for the given API UUID. */
    @Override
    public List<APIRevisionDeployment> getAPIRevisionsDeploymentList(String apiId) throws APIManagementException {
        return apiMgtDAO.getAPIRevisionDeploymentByApiUUID(apiId);
    }
}
components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIProviderImpl.java
/* * Copyright (c) 2005-2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.apimgt.impl; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.gson.Gson; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMException; import org.apache.axiom.om.util.AXIOMUtil; import org.apache.axis2.Constants; import org.apache.axis2.util.JavaUtils; import org.apache.commons.collections.MapUtils; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.solr.client.solrj.util.ClientUtils; import org.json.simple.JSONObject; import org.json.simple.parser.JSONParser; import org.json.simple.parser.ParseException; import org.wso2.carbon.CarbonConstants; import org.wso2.carbon.apimgt.api.APIDefinition; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.APIMgtResourceAlreadyExistsException; import org.wso2.carbon.apimgt.api.APIMgtResourceNotFoundException; import org.wso2.carbon.apimgt.api.APIProvider; import org.wso2.carbon.apimgt.api.ErrorItem; import org.wso2.carbon.apimgt.api.ExceptionCodes; import 
org.wso2.carbon.apimgt.api.FaultGatewaysException; import org.wso2.carbon.apimgt.api.MonetizationException; import org.wso2.carbon.apimgt.api.UnsupportedPolicyTypeException; import org.wso2.carbon.apimgt.api.WorkflowResponse; import org.wso2.carbon.apimgt.api.doc.model.APIResource; import org.wso2.carbon.apimgt.api.dto.CertificateInformationDTO; import org.wso2.carbon.apimgt.api.dto.CertificateMetadataDTO; import org.wso2.carbon.apimgt.api.dto.ClientCertificateDTO; import org.wso2.carbon.apimgt.api.dto.KeyManagerConfigurationDTO; import org.wso2.carbon.apimgt.api.dto.UserApplicationAPIUsage; import org.wso2.carbon.apimgt.api.model.API; import org.wso2.carbon.apimgt.api.model.APICategory; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.APIInfo; import org.wso2.carbon.apimgt.api.model.APIProduct; import org.wso2.carbon.apimgt.api.model.APIProductIdentifier; import org.wso2.carbon.apimgt.api.model.APIProductResource; import org.wso2.carbon.apimgt.api.model.APIRevision; import org.wso2.carbon.apimgt.api.model.APIRevisionDeployment; import org.wso2.carbon.apimgt.api.model.APIStateChangeResponse; import org.wso2.carbon.apimgt.api.model.APIStore; import org.wso2.carbon.apimgt.api.model.ApiTypeWrapper; import org.wso2.carbon.apimgt.api.model.BlockConditionsDTO; import org.wso2.carbon.apimgt.api.model.Comment; import org.wso2.carbon.apimgt.api.model.DeployedAPIRevision; import org.wso2.carbon.apimgt.api.model.Documentation; import org.wso2.carbon.apimgt.api.model.Documentation.DocumentSourceType; import org.wso2.carbon.apimgt.api.model.Documentation.DocumentVisibility; import org.wso2.carbon.apimgt.api.model.DocumentationContent; import org.wso2.carbon.apimgt.api.model.DocumentationType; import org.wso2.carbon.apimgt.api.model.EndpointSecurity; import org.wso2.carbon.apimgt.api.model.Identifier; import org.wso2.carbon.apimgt.api.model.KeyManager; import org.wso2.carbon.apimgt.api.model.LifeCycleEvent; import 
org.wso2.carbon.apimgt.api.model.Mediation; import org.wso2.carbon.apimgt.api.model.Monetization; import org.wso2.carbon.apimgt.api.model.Provider; import org.wso2.carbon.apimgt.api.model.ResourceFile; import org.wso2.carbon.apimgt.api.model.ResourcePath; import org.wso2.carbon.apimgt.api.model.Scope; import org.wso2.carbon.apimgt.api.model.SharedScopeUsage; import org.wso2.carbon.apimgt.api.model.SubscribedAPI; import org.wso2.carbon.apimgt.api.model.Subscriber; import org.wso2.carbon.apimgt.api.model.SwaggerData; import org.wso2.carbon.apimgt.api.model.Tier; import org.wso2.carbon.apimgt.api.model.URITemplate; import org.wso2.carbon.apimgt.api.model.Usage; import org.wso2.carbon.apimgt.api.model.policy.APIPolicy; import org.wso2.carbon.apimgt.api.model.policy.ApplicationPolicy; import org.wso2.carbon.apimgt.api.model.policy.Condition; import org.wso2.carbon.apimgt.api.model.policy.GlobalPolicy; import org.wso2.carbon.apimgt.api.model.policy.Pipeline; import org.wso2.carbon.apimgt.api.model.policy.Policy; import org.wso2.carbon.apimgt.api.model.policy.PolicyConstants; import org.wso2.carbon.apimgt.api.model.policy.SubscriptionPolicy; import org.wso2.carbon.apimgt.impl.certificatemgt.CertificateManager; import org.wso2.carbon.apimgt.impl.certificatemgt.CertificateManagerImpl; import org.wso2.carbon.apimgt.impl.certificatemgt.ResponseCode; import org.wso2.carbon.apimgt.impl.dao.ApiMgtDAO; import org.wso2.carbon.apimgt.impl.dao.GatewayArtifactsMgtDAO; import org.wso2.carbon.apimgt.impl.dao.ServiceCatalogDAO; import org.wso2.carbon.apimgt.impl.definitions.GraphQLSchemaDefinition; import org.wso2.carbon.apimgt.impl.definitions.OAS3Parser; import org.wso2.carbon.apimgt.impl.definitions.OASParserUtil; import org.wso2.carbon.apimgt.impl.dto.JwtTokenInfoDTO; import org.wso2.carbon.apimgt.impl.dto.KeyManagerDto; import org.wso2.carbon.apimgt.impl.dto.SubscribedApiDTO; import org.wso2.carbon.apimgt.impl.dto.ThrottleProperties; import 
org.wso2.carbon.apimgt.impl.dto.TierPermissionDTO; import org.wso2.carbon.apimgt.impl.dto.WorkflowDTO; import org.wso2.carbon.apimgt.impl.dto.WorkflowProperties; import org.wso2.carbon.apimgt.impl.factory.KeyManagerHolder; import org.wso2.carbon.apimgt.impl.gatewayartifactsynchronizer.ArtifactSaver; import org.wso2.carbon.apimgt.impl.gatewayartifactsynchronizer.exception.ArtifactSynchronizerException; import org.wso2.carbon.apimgt.impl.importexport.APIImportExportException; import org.wso2.carbon.apimgt.impl.importexport.ExportFormat; import org.wso2.carbon.apimgt.impl.importexport.ImportExportAPI; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.monetization.DefaultMonetizationImpl; import org.wso2.carbon.apimgt.impl.notification.NotificationDTO; import org.wso2.carbon.apimgt.impl.notification.NotificationExecutor; import org.wso2.carbon.apimgt.impl.notification.NotifierConstants; import org.wso2.carbon.apimgt.impl.notification.exception.NotificationException; import org.wso2.carbon.apimgt.impl.notifier.events.APIEvent; import org.wso2.carbon.apimgt.impl.notifier.events.APIPolicyEvent; import org.wso2.carbon.apimgt.impl.notifier.events.ApplicationPolicyEvent; import org.wso2.carbon.apimgt.impl.notifier.events.CertificateEvent; import org.wso2.carbon.apimgt.impl.notifier.events.GlobalPolicyEvent; import org.wso2.carbon.apimgt.impl.notifier.events.ScopeEvent; import org.wso2.carbon.apimgt.impl.notifier.events.SubscriptionEvent; import org.wso2.carbon.apimgt.impl.notifier.events.SubscriptionPolicyEvent; import org.wso2.carbon.apimgt.impl.publishers.WSO2APIPublisher; import org.wso2.carbon.apimgt.impl.recommendationmgt.RecommendationEnvironment; import org.wso2.carbon.apimgt.impl.recommendationmgt.RecommenderDetailsExtractor; import org.wso2.carbon.apimgt.impl.recommendationmgt.RecommenderEventPublisher; import org.wso2.carbon.apimgt.impl.token.ApiKeyGenerator; import 
org.wso2.carbon.apimgt.impl.token.ClaimsRetriever; import org.wso2.carbon.apimgt.impl.token.InternalAPIKeyGenerator; import org.wso2.carbon.apimgt.impl.utils.APIAuthenticationAdminClient; import org.wso2.carbon.apimgt.impl.utils.APIMWSDLReader; import org.wso2.carbon.apimgt.impl.utils.APINameComparator; import org.wso2.carbon.apimgt.impl.utils.APIProductNameComparator; import org.wso2.carbon.apimgt.impl.utils.APIStoreNameComparator; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.apimgt.impl.utils.APIVersionComparator; import org.wso2.carbon.apimgt.impl.utils.APIVersionStringComparator; import org.wso2.carbon.apimgt.impl.utils.ContentSearchResultNameComparator; import org.wso2.carbon.apimgt.impl.workflow.APIStateWorkflowDTO; import org.wso2.carbon.apimgt.impl.workflow.WorkflowConstants; import org.wso2.carbon.apimgt.impl.workflow.WorkflowException; import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutor; import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutorFactory; import org.wso2.carbon.apimgt.impl.workflow.WorkflowStatus; import org.wso2.carbon.apimgt.impl.wsdl.WSDLProcessor; import org.wso2.carbon.apimgt.persistence.LCManagerFactory; import org.wso2.carbon.apimgt.persistence.dto.DocumentContent; import org.wso2.carbon.apimgt.persistence.dto.DocumentSearchContent; import org.wso2.carbon.apimgt.persistence.dto.DocumentSearchResult; import org.wso2.carbon.apimgt.persistence.dto.MediationInfo; import org.wso2.carbon.apimgt.persistence.dto.Organization; import org.wso2.carbon.apimgt.persistence.dto.PublisherAPI; import org.wso2.carbon.apimgt.persistence.dto.PublisherAPIInfo; import org.wso2.carbon.apimgt.persistence.dto.PublisherAPIProduct; import org.wso2.carbon.apimgt.persistence.dto.PublisherAPIProductInfo; import org.wso2.carbon.apimgt.persistence.dto.PublisherAPIProductSearchResult; import org.wso2.carbon.apimgt.persistence.dto.PublisherAPISearchResult; import 
org.wso2.carbon.apimgt.persistence.dto.PublisherContentSearchResult; import org.wso2.carbon.apimgt.persistence.dto.PublisherSearchContent; import org.wso2.carbon.apimgt.persistence.dto.SearchContent; import org.wso2.carbon.apimgt.persistence.dto.UserContext; import org.wso2.carbon.apimgt.persistence.exceptions.APIPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.AsyncSpecPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.DocumentationPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.GraphQLPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.MediationPolicyPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.OASPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.PersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.ThumbnailPersistenceException; import org.wso2.carbon.apimgt.persistence.exceptions.WSDLPersistenceException; import org.wso2.carbon.apimgt.persistence.mapper.APIMapper; import org.wso2.carbon.apimgt.persistence.mapper.APIProductMapper; import org.wso2.carbon.apimgt.persistence.mapper.DocumentMapper; import org.wso2.carbon.context.CarbonContext; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.databridge.commons.Event; import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact; import org.wso2.carbon.governance.api.exception.GovernanceException; import org.wso2.carbon.governance.api.generic.GenericArtifactManager; import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact; import org.wso2.carbon.governance.api.util.GovernanceUtils; import org.wso2.carbon.governance.custom.lifecycles.checklist.beans.LifecycleBean; import org.wso2.carbon.governance.custom.lifecycles.checklist.util.CheckListItem; import org.wso2.carbon.governance.custom.lifecycles.checklist.util.LifecycleBeanPopulator; import 
org.wso2.carbon.governance.custom.lifecycles.checklist.util.Property; import org.wso2.carbon.governance.lcm.util.CommonUtil; import org.wso2.carbon.registry.core.ActionConstants; import org.wso2.carbon.registry.core.Association; import org.wso2.carbon.registry.core.Registry; import org.wso2.carbon.registry.core.RegistryConstants; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.config.RegistryContext; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.jdbc.realm.RegistryAuthorizationManager; import org.wso2.carbon.registry.core.pagination.PaginationContext; import org.wso2.carbon.registry.core.service.RegistryService; import org.wso2.carbon.registry.core.session.UserRegistry; import org.wso2.carbon.registry.core.utils.RegistryUtils; import org.wso2.carbon.user.api.AuthorizationManager; import org.wso2.carbon.user.api.UserStoreException; import org.wso2.carbon.user.api.UserStoreManager; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.SortedSet; import java.util.StringTokenizer; import java.util.TreeSet; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import javax.cache.Cache; import javax.cache.Caching; import javax.xml.namespace.QName; import 
javax.xml.stream.XMLStreamException;

/**
 * This class provides the core API provider functionality. It is implemented in a very
 * self-contained and 'pure' manner, without taking requirements like security into account,
 * which are subject to frequent change. Due to this 'pure' nature and the significance of
 * the class to the overall API management functionality, the visibility of the class has
 * been reduced to package level. This means we can still use it for internal purposes and
 * possibly even extend it, but it's totally off the limits of the users. Users wishing to
 * programmatically access this functionality should use one of the extensions of this
 * class which is visible to them. These extensions may add additional features like
 * security to this class.
 */
class APIProviderImpl extends AbstractAPIManager implements APIProvider {

    private static final Log log = LogFactory.getLog(APIProviderImpl.class);

    // DAO used for service-catalog lookups.
    private ServiceCatalogDAO serviceCatalogDAO = ServiceCatalogDAO.getInstance();
    // Username exactly as supplied by the caller, before any tenant/email-domain rewriting.
    private final String userNameWithoutChange;
    // Manages endpoint/gateway certificates.
    private CertificateManager certificateManager;
    // Persists gateway runtime artifacts; obtained from the service holder (may be null if none registered).
    protected ArtifactSaver artifactSaver;
    // Import/export service used for revision handling.
    protected ImportExportAPI importExportAPI;
    // DAO for gateway artifact metadata.
    protected GatewayArtifactsMgtDAO gatewayArtifactsMgtDAO;
    // Recommendation engine configuration; null when the recommendation feature is disabled.
    private RecommendationEnvironment recommendationEnvironment;

    /**
     * Creates a provider-side API manager bound to the given user.
     *
     * @param username logged-in user name
     * @throws APIManagementException if the base manager fails to initialize
     */
    public APIProviderImpl(String username) throws APIManagementException {
        super(username);
        this.userNameWithoutChange = username;
        certificateManager = CertificateManagerImpl.getInstance();
        this.artifactSaver = ServiceReferenceHolder.getInstance().getArtifactSaver();
        this.importExportAPI = ServiceReferenceHolder.getInstance().getImportExportService();
        this.gatewayArtifactsMgtDAO = GatewayArtifactsMgtDAO.getInstance();
        this.recommendationEnvironment = ServiceReferenceHolder.getInstance().getAPIManagerConfigurationService()
                .getAPIManagerConfiguration().getApiRecommendationEnvironment();
    }

    // Returns the username exactly as passed to the constructor (no tenant/email-domain change applied).
    protected String getUserNameWithoutChange() {
        return userNameWithoutChange;
    }

    /**
     * Returns a list of all #{@link
org.wso2.carbon.apimgt.api.model.Provider} available on the system.
     *
     * @return Set<Provider> all providers found in the governance registry
     * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get Providers
     */
    @Override
    public Set<Provider> getAllProviders() throws APIManagementException {

        Set<Provider> providerSet = new HashSet<Provider>();
        GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry,
                APIConstants.PROVIDER_KEY);
        try {
            if (artifactManager == null) {
                // Without an artifact manager we cannot read provider artifacts at all.
                String errorMessage = "Failed to retrieve artifact manager when fetching providers.";
                log.error(errorMessage);
                throw new APIManagementException(errorMessage);
            }
            GenericArtifact[] genericArtifact = artifactManager.getAllGenericArtifacts();
            if (genericArtifact == null || genericArtifact.length == 0) {
                // No provider artifacts registered yet; return the empty set.
                return providerSet;
            }
            for (GenericArtifact artifact : genericArtifact) {
                Provider provider = new Provider(artifact.getAttribute(APIConstants.PROVIDER_OVERVIEW_NAME));
                // NOTE(review): description/email are set to the registry attribute KEY constants, not to
                // artifact.getAttribute(...) values. Looks like a long-standing quirk — confirm before relying
                // on these fields downstream.
                provider.setDescription(APIConstants.PROVIDER_OVERVIEW_DESCRIPTION);
                provider.setEmail(APIConstants.PROVIDER_OVERVIEW_EMAIL);
                providerSet.add(provider);
            }
        } catch (GovernanceException e) {
            handleException("Failed to get all providers", e);
        }
        return providerSet;
    }

    /**
     * Get a list of APIs published by the given provider. If a given API has multiple versions, only the latest
     * version will be included in this list.
*
     * @param providerId provider id
     * @return set of API
     * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get set of API
     */
    @Override
    public List<API> getAPIsByProvider(String providerId) throws APIManagementException {

        List<API> apiSortedList = new ArrayList<API>();
        try {
            // Normalize '@' in the provider name to the registry-safe form before building the path.
            providerId = APIUtil.replaceEmailDomain(providerId);
            String providerPath = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR + providerId;
            GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
            // Provider -> API links are stored as registry associations under the provider path.
            Association[] associations = registry.getAssociations(providerPath, APIConstants.PROVIDER_ASSOCIATION);
            for (Association association : associations) {
                String apiPath = association.getDestinationPath();
                if (registry.resourceExists(apiPath)) {
                    Resource resource = registry.get(apiPath);
                    String apiArtifactId = resource.getUUID();
                    if (apiArtifactId != null) {
                        GenericArtifact apiArtifact = artifactManager.getGenericArtifact(apiArtifactId);
                        if (apiArtifact != null) {
                            String type = apiArtifact.getAttribute(APIConstants.API_OVERVIEW_TYPE);
                            // Skip API Products; only plain APIs are returned here.
                            if (!APIConstants.API_PRODUCT.equals(type)) {
                                apiSortedList.add(getAPI(apiArtifact));
                            }
                        }
                    } else {
                        throw new GovernanceException("artifact id is null of " + apiPath);
                    }
                }
            }
        } catch (RegistryException e) {
            handleException("Failed to get APIs for provider : " + providerId, e);
        }
        // Sort by API name for a stable, user-friendly ordering.
        Collections.sort(apiSortedList, new APINameComparator());
        return apiSortedList;
    }

    /**
     * Get a list of all the consumers for all APIs.
     *
     * @param providerId id of the provider
     * @return Set<Subscriber>
     * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get subscribed APIs of given provider
     */
    @Override
    @Deprecated
    public Set<Subscriber> getSubscribersOfProvider(String providerId) throws APIManagementException {

        Set<Subscriber> subscriberSet = null;
        try {
            subscriberSet = apiMgtDAO.getSubscribersOfProvider(providerId);
        } catch (APIManagementException e) {
            handleException("Failed to get Subscribers for : "
+ providerId, e); } return subscriberSet; } /** * get details of provider * * @param providerName name of the provider * @return Provider * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get Provider */ @Override public Provider getProvider(String providerName) throws APIManagementException { Provider provider = null; String providerPath = APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + APIConstants.PROVIDERS_PATH + RegistryConstants.PATH_SEPARATOR + providerName; try { GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.PROVIDER_KEY); if (artifactManager == null) { String errorMessage = "Failed to retrieve artifact manager when getting provider " + providerName; log.error(errorMessage); throw new APIManagementException(errorMessage); } Resource providerResource = registry.get(providerPath); String artifactId = providerResource.getUUID(); if (artifactId == null) { throw new APIManagementException("artifact it is null"); } GenericArtifact providerArtifact = artifactManager.getGenericArtifact(artifactId); provider = APIUtil.getProvider(providerArtifact); } catch (RegistryException e) { handleException("Failed to get Provider form : " + providerName, e); } return provider; } /** * Return Usage of given APIIdentifier * * @param apiIdentifier APIIdentifier * @return Usage */ @Override public Usage getUsageByAPI(APIIdentifier apiIdentifier) { return null; } /** * Return Usage of given provider and API * * @param providerId if of the provider * @param apiName name of the API * @return Usage */ @Override public Usage getAPIUsageByUsers(String providerId, String apiName) { return null; } /** * Returns usage details of all APIs published by a provider * * @param providerName Provider Id * @return UserApplicationAPIUsages for given provider * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to get UserApplicationAPIUsage */ @Override 
public UserApplicationAPIUsage[] getAllAPIUsageByProvider(String providerName) throws APIManagementException {
        return apiMgtDAO.getAllAPIUsageByProvider(providerName);
    }

    /**
     * Returns usage details of a particular API.
     *
     * @param uuid         API uuid
     * @param organization identifier of the organization
     * @return subscriptions whose API identifier matches the given API (email-domain differences ignored)
     * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to get UserApplicationAPIUsage
     */
    @Override
    public List<SubscribedAPI> getAPIUsageByAPIId(String uuid, String organization) throws APIManagementException {

        APIIdentifier identifier = apiMgtDAO.getAPIIdentifierFromUUID(uuid);
        List<SubscribedAPI> subscribedAPIs = new ArrayList<>();
        if (identifier != null) {
            // Normalize the provider name so identifiers compare equal regardless of '@' vs '-AT-' form.
            APIIdentifier apiIdEmailReplaced =
                    new APIIdentifier(APIUtil.replaceEmailDomain(identifier.getProviderName()),
                            identifier.getApiName(), identifier.getVersion());
            UserApplicationAPIUsage[] allApiResult = apiMgtDAO.getAllAPIUsageByProviderAndApiId(uuid, organization);
            for (UserApplicationAPIUsage usage : allApiResult) {
                for (SubscribedAPI apiSubscription : usage.getApiSubscriptions()) {
                    APIIdentifier subsApiId = apiSubscription.getApiId();
                    APIIdentifier subsApiIdEmailReplaced = new APIIdentifier(
                            APIUtil.replaceEmailDomain(subsApiId.getProviderName()), subsApiId.getApiName(),
                            subsApiId.getVersion());
                    if (subsApiIdEmailReplaced.equals(apiIdEmailReplaced)) {
                        subscribedAPIs.add(apiSubscription);
                    }
                }
            }
        }
        return subscribedAPIs;
    }

    /**
     * Returns usage details of a particular API Product.
     *
     * @param apiProductId API Product identifier
     * @return subscriptions whose product identifier matches the given product (email-domain differences ignored)
     * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to get UserApplicationAPIUsage
     */
    @Override
    public List<SubscribedAPI> getAPIProductUsageByAPIProductId(APIProductIdentifier apiProductId)
            throws APIManagementException {

        APIProductIdentifier apiIdEmailReplaced =
                new APIProductIdentifier(APIUtil.replaceEmailDomain(apiProductId.getProviderName()),
                        apiProductId.getName(), apiProductId.getVersion());
        UserApplicationAPIUsage[] allApiProductResult =
                apiMgtDAO.getAllAPIProductUsageByProvider(apiProductId.getProviderName());
        List<SubscribedAPI> subscribedAPIs = new ArrayList<>();
        for (UserApplicationAPIUsage usage : allApiProductResult) {
            for (SubscribedAPI apiSubscription : usage.getApiSubscriptions()) {
                APIProductIdentifier subsApiProductId = apiSubscription.getProductId();
                APIProductIdentifier subsApiProductIdEmailReplaced = new APIProductIdentifier(
                        APIUtil.replaceEmailDomain(subsApiProductId.getProviderName()), subsApiProductId.getName(),
                        subsApiProductId.getVersion());
                if (subsApiProductIdEmailReplaced.equals(apiIdEmailReplaced)) {
                    subscribedAPIs.add(apiSubscription);
                }
            }
        }
        return subscribedAPIs;
    }

    /**
     * Shows how a given consumer uses the given API.
     *
     * @param apiIdentifier APIIdentifier
     * @param consumerEmail E-mail Address of consumer
     * @return Usage (always null in this implementation)
     */
    @Override
    public Usage getAPIUsageBySubscriber(APIIdentifier apiIdentifier, String consumerEmail) {
        // Not implemented here; usage data is served elsewhere.
        return null;
    }

    /**
     * Returns full list of Subscribers of an API.
     *
     * @param identifier APIIdentifier
     * @return Set<Subscriber>
     * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get Subscribers
     */
    @Override
    public Set<Subscriber> getSubscribersOfAPI(APIIdentifier identifier) throws APIManagementException {

        Set<Subscriber> subscriberSet = null;
        try {
            subscriberSet = apiMgtDAO.getSubscribersOfAPI(identifier);
        } catch (APIManagementException e) {
            handleException("Failed to get subscribers for API : " + identifier.getApiName(), e);
        }
        return subscriberSet;
    }

    /**
     * Returns full list of subscriptions of an API.
     *
     * @param apiName    Name of the API
     * @param apiVersion Version of the API
     * @param provider   Name of API creator
     * @return All subscriptions of a given API
     * @throws APIManagementException if failed to get Subscribers
     */
    public List<SubscribedAPI> getSubscriptionsOfAPI(String apiName, String apiVersion, String provider) throws
APIManagementException {
        return apiMgtDAO.getSubscriptionsOfAPI(apiName, apiVersion, provider);
    }

    /**
     * Returns the subscription count for the given API.
     *
     * @param identifier APIIdentifier
     * @return number of subscriptions for the API (0 if the lookup fails)
     * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get APISubscriptionCountByAPI
     */
    @Override
    public long getAPISubscriptionCountByAPI(APIIdentifier identifier) throws APIManagementException {

        long count = 0L;
        try {
            count = apiMgtDAO.getAPISubscriptionCountByAPI(identifier);
        } catch (APIManagementException e) {
            handleException("Failed to get APISubscriptionCount for: " + identifier.getApiName(), e);
        }
        return count;
    }

    /**
     * Builds the XML throttle policy for the given tier by filling the policy template with the
     * tier's name, request count, unit time and a custom-attribute block.
     *
     * @param tier tier to build the policy for
     * @return parsed policy OMElement, or null if the generated XML could not be parsed
     * @throws APIManagementException if the generated policy XML is invalid
     */
    private OMElement createThrottlePolicy(Tier tier) throws APIManagementException {

        OMElement throttlePolicy = null;
        String policy = APIConstants.THROTTLE_POLICY_TEMPLATE;
        StringBuilder attributeBuilder = new StringBuilder();
        Map<String, Object> tierAttributes = tier.getTierAttributes();

        if (tierAttributes != null) {
            for (Map.Entry<String, Object> entry : tierAttributes.entrySet()) {
                if (entry.getValue() instanceof String) {
                    String attributeName = entry.getKey().trim();
                    String attributeValue = ((String) entry.getValue()).trim();
                    // We see whether the attribute name is empty.
                    if (!attributeName.isEmpty()) {
                        attributeBuilder.append(String.format(APIConstants.THROTTLE_POLICY_ATTRIBUTE_TEMPLATE,
                                attributeName, attributeValue, attributeName));
                    }
                } else {
                    // Non-string attribute values are not representable in the template; skip them.
                    if (log.isDebugEnabled()) {
                        log.debug("Unrecognized throttle attribute value : " + entry.getValue()
                                + " of attribute name : " + entry.getKey());
                    }
                }
            }
        }

        // We add the "description", "billing plan" and "stop on quota reach" as custom attributes
        attributeBuilder.append(String.format(APIConstants.THROTTLE_POLICY_ATTRIBUTE_TEMPLATE,
                APIConstants.THROTTLE_TIER_DESCRIPTION_ATTRIBUTE, tier.getDescription().trim(),
                APIConstants.THROTTLE_TIER_DESCRIPTION_ATTRIBUTE));
        attributeBuilder.append(String.format(APIConstants.THROTTLE_POLICY_ATTRIBUTE_TEMPLATE,
                APIConstants.THROTTLE_TIER_PLAN_ATTRIBUTE, tier.getTierPlan().trim(),
                APIConstants.THROTTLE_TIER_PLAN_ATTRIBUTE));
        attributeBuilder.append(String.format(APIConstants.THROTTLE_POLICY_ATTRIBUTE_TEMPLATE,
                APIConstants.THROTTLE_TIER_QUOTA_ACTION_ATTRIBUTE, String.valueOf(tier.isStopOnQuotaReached()),
                APIConstants.THROTTLE_TIER_QUOTA_ACTION_ATTRIBUTE));

        // Note: We assume that the unit time is in milliseconds.
        policy = String.format(policy, tier.getName(), tier.getRequestCount(), tier.getUnitTime(),
                attributeBuilder.toString());
        try {
            throttlePolicy = AXIOMUtil.stringToOM(policy);
        } catch (XMLStreamException e) {
            handleException("Invalid policy xml generated", e);
        }
        return throttlePolicy;
    }

    /**
     * Adds a new API to the Publisher: validates the API, ensures the default lifecycle exists,
     * persists the API and notifies key managers.
     *
     * @param api API
     * @return the added API with its uuid and created time populated
     * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to add API
     */
    public API addAPI(API api) throws APIManagementException {

        validateApiInfo(api);
        String tenantDomain = MultitenantUtils
                .getTenantDomain(APIUtil.replaceEmailDomainBack(api.getId().getProviderName()));
        validateResourceThrottlingTiers(api, tenantDomain);
        validateKeyManagers(api);

        if (api.isEndpointSecured() && StringUtils.isEmpty(api.getEndpointUTPassword())) {
            String errorMessage = "Empty password is given for endpointSecurity when creating API "
                    + api.getId().getApiName();
            throw new APIManagementException(errorMessage);
        }

        //Validate Transports
        validateAndSetTransports(api);
        validateAndSetAPISecurity(api);

        RegistryService registryService = ServiceReferenceHolder.getInstance().getRegistryService();
        //Add default API LC if it is not there
        try {
            if (!CommonUtil.lifeCycleExists(APIConstants.API_LIFE_CYCLE,
                    registryService.getConfigSystemRegistry(tenantId))) {
                String defaultLifecyclePath = CommonUtil.getDefaltLifecycleConfigLocation() + File.separator
                        + APIConstants.API_LIFE_CYCLE + APIConstants.XML_EXTENSION;
                File file = new File(defaultLifecyclePath);
                String content = null;
                // NOTE: file != null is always true here; the exists() check is what matters.
                if (file != null && file.exists()) {
                    content = FileUtils.readFileToString(file);
                }
                if (content != null) {
                    CommonUtil.addLifecycle(content, registryService.getConfigSystemRegistry(tenantId),
                            CommonUtil.getRootSystemRegistry(tenantId));
                }
            }
        } catch (RegistryException e) {
            handleException("Error occurred while adding default APILifeCycle.", e);
        } catch (IOException e) {
            handleException("Error occurred while loading APILifeCycle.xml.", e);
        } catch (XMLStreamException e) {
handleException("Error occurred while adding default API LifeCycle.", e);
        }

        // Persist the API artifact; the persistence layer assigns the uuid and creation time.
        try {
            PublisherAPI addedAPI = apiPersistenceInstance.addAPI(new Organization(api.getOrganization()),
                    APIMapper.INSTANCE.toPublisherApi(api));
            api.setUuid(addedAPI.getId());
            api.setCreatedTime(addedAPI.getCreatedTime());
        } catch (APIPersistenceException e) {
            throw new APIManagementException("Error while persisting API ", e);
        }

        if (log.isDebugEnabled()) {
            log.debug("API details successfully added to the registry. API Name: " + api.getId().getApiName()
                    + ", API Version : " + api.getId().getVersion() + ", API context : " + api.getContext());
        }

        // Store API metadata, scopes and URI templates in the AM database.
        int tenantId = APIUtil.getInternalOrganizationId(api.getOrganization());
        addAPI(api, tenantId);

        JSONObject apiLogObject = new JSONObject();
        apiLogObject.put(APIConstants.AuditLogConstants.NAME, api.getId().getApiName());
        apiLogObject.put(APIConstants.AuditLogConstants.CONTEXT, api.getContext());
        apiLogObject.put(APIConstants.AuditLogConstants.VERSION, api.getId().getVersion());
        apiLogObject.put(APIConstants.AuditLogConstants.PROVIDER, api.getId().getProviderName());

        APIUtil.logAuditMessage(APIConstants.AuditLogConstants.API, apiLogObject.toString(),
                APIConstants.AuditLogConstants.CREATED, this.username);

        if (log.isDebugEnabled()) {
            log.debug("API details successfully added to the API Manager Database. API Name: " + api.getId()
                    .getApiName() + ", API Version : " + api.getId().getVersion() + ", API context : " + api
                    .getContext());
        }

        // Warm the context cache so subsequent context-uniqueness checks see this API.
        if (APIUtil.isAPIManagementEnabled()) {
            Cache contextCache = APIUtil.getAPIContextCache();
            Boolean apiContext = null;
            Object cachedObject = contextCache.get(api.getContext());
            if (cachedObject != null) {
                apiContext = Boolean.valueOf(cachedObject.toString());
            }
            if (apiContext == null) {
                contextCache.put(api.getContext(), Boolean.TRUE);
            }
        }

        // Normalize the literal string "null" (seen from some clients) to a real null.
        if ("null".equals(api.getAccessControlRoles())) {
            api.setAccessControlRoles(null);
        }

        //notify key manager with API addition
        registerOrUpdateResourceInKeyManager(api, tenantDomain);
        return api;
    }

    /**
     * Add API metadata, local scopes and URI templates to the database and KeyManager.
     *
     * @param api      API to add
     * @param tenantId Tenant Id
     * @throws APIManagementException if an error occurs while adding the API
     */
    private void addAPI(API api, int tenantId) throws APIManagementException {

        int apiId = apiMgtDAO.addAPI(api, tenantId, api.getOrganization());
        addLocalScopes(api.getUuid(), api.getUriTemplates(), api.getOrganization());
        addURITemplates(apiId, api, tenantId);
        String tenantDomain = MultitenantUtils
                .getTenantDomain(APIUtil.replaceEmailDomainBack(api.getId().getProviderName()));
        // Publish an API_CREATE event so listeners (gateways, analytics, ...) learn about the new API.
        APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(),
                APIConstants.EventType.API_CREATE.name(), tenantId, tenantDomain, api.getId().getApiName(), apiId,
                api.getUuid(), api.getId().getVersion(), api.getType(), api.getContext(),
                APIUtil.replaceEmailDomainBack(api.getId().getProviderName()), api.getStatus());
        APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name());
    }

    /**
     * Add local scopes for the API if the scopes do not exist as shared scopes. The local scopes to add will be
     * taken from the URI templates.
* * @param uuid API uuid * @param uriTemplates URI Templates * @param organization Organization * @throws APIManagementException if fails to add local scopes for the API */ private void addLocalScopes(String uuid, Set<URITemplate> uriTemplates, String organization) throws APIManagementException { int tenantId = APIUtil.getInternalOrganizationId(organization); String tenantDomain = APIUtil.getTenantDomainFromTenantId(tenantId); Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain); //Get the local scopes set to register for the API from URI templates Set<Scope> scopesToRegister = getScopesToRegisterFromURITemplates(uuid, organization, uriTemplates); //Register scopes for (Scope scope : scopesToRegister) { for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) { KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager(); if (keyManager != null) { String scopeKey = scope.getKey(); try { // Check if key already registered in KM. Scope Key may be already registered for a different // version. if (!keyManager.isScopeExists(scopeKey)) { //register scope in KM keyManager.registerScope(scope); } else { if (log.isDebugEnabled()) { log.debug("Scope: " + scopeKey + " already registered in KM. Skipping registering scope."); } } } catch (APIManagementException e) { log.error("Error while registering Scope " + scopeKey + "in Key Manager " + keyManagerDtoEntry.getKey(), e); } } } } addScopes(scopesToRegister, tenantId); } /** * Extract the scopes set from URI templates which needs to be registered as local scopes for the API. 
     *
     * @param uuid         API uuid
     * @param organization Organization
     * @param uriTemplates URI templates
     * @return Local Scopes set to register
     * @throws APIManagementException if fails to extract Scopes from URI templates
     */
    private Set<Scope> getScopesToRegisterFromURITemplates(String uuid, String organization,
            Set<URITemplate> uriTemplates) throws APIManagementException {

        int tenantId = APIUtil.getInternalOrganizationId(organization);
        Set<Scope> scopesToRegister = new HashSet<>();
        Set<Scope> uriTemplateScopes = new HashSet<>();
        //Get the attached scopes set from the URI templates
        for (URITemplate uriTemplate : uriTemplates) {
            List<Scope> scopesFromURITemplate = uriTemplate.retrieveAllScopes();
            for (Scope scopeFromURITemplate : scopesFromURITemplate) {
                if (scopeFromURITemplate == null) {
                    continue; // No scopes attached for the URI Template
                }
                uriTemplateScopes.add(scopeFromURITemplate);
            }
        }
        //Validate and extract only the local scopes which need to be registered in KM
        for (Scope scope : uriTemplateScopes) {
            String scopeKey = scope.getKey();
            //Check if it an existing shared scope, if so skip adding scope
            if (!isSharedScopeNameExists(scopeKey, tenantId)) {
                // Check if scope key is already assigned locally to a different API (Other than different versions of
                // the same API).
                if (!isScopeKeyAssignedLocally(uuid, scope.getKey(), organization)) {
                    scopesToRegister.add(scope);
                } else {
                    // A locally-assigned scope on another API is a hard conflict: abort the whole operation.
                    throw new APIManagementException("Error while adding local scopes for API with UUID " + uuid
                            + ". Scope: " + scopeKey + " already assigned locally for a different API.");
                }
            } else if (log.isDebugEnabled()) {
                log.debug("Scope " + scopeKey + " exists as a shared scope. Skip adding as a local scope.");
            }
        }
        return scopesToRegister;
    }

    /**
     * Add URI templates for the API.
     *
     * @param apiId    API Id
     * @param api      API
     * @param tenantId Tenant Id
     * @throws APIManagementException if fails to add URI templates for the API
     */
    private void addURITemplates(int apiId, API api, int tenantId) throws APIManagementException {

        String tenantDomain = APIUtil.getTenantDomainFromTenantId(tenantId);
        // Persist the templates first, then attach resource scopes in every configured Key Manager.
        apiMgtDAO.addURITemplates(apiId, api, tenantId);
        Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain);
        for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) {
            KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager();
            if (keyManager != null) {
                try {
                    keyManager.attachResourceScopes(api, api.getUriTemplates());
                } catch (APIManagementException e) {
                    // Best-effort per Key Manager: log and continue with the remaining Key Managers.
                    log.error("Error while Attaching Resource to scope in Key Manager "
                            + keyManagerDtoEntry.getKey(), e);
                }
            }
        }
    }

    /**
     * Notify the key manager with API update or addition
     *
     * @param api API
     * @param tenantDomain tenant domain of the API provider
     * @throws APIManagementException when error occurs when register/update API at Key Manager side
     */
    private void registerOrUpdateResourceInKeyManager(API api, String tenantDomain) throws APIManagementException {

        //get new key manager instance for resource registration.
        Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain);
        for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) {
            KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager();
            if (keyManager != null) {
                try {
                    // Register the resource if the KM has never seen this API; otherwise update it in place.
                    Map registeredResource = keyManager.getResourceByApiId(api.getId().toString());
                    if (registeredResource == null) {
                        boolean isNewResourceRegistered = keyManager.registerNewResource(api, null);
                        if (!isNewResourceRegistered) {
                            log.warn("APIResource registration is failed while adding the API- "
                                    + api.getId().getApiName() + "-" + api.getId().getVersion()
                                    + " into Key Manager : " + keyManagerDtoEntry.getKey());
                        }
                    } else {
                        //update APIResource.
                        String resourceId = (String) registeredResource.get("resourceId");
                        if (resourceId == null) {
                            handleException("APIResource update is failed because of empty resourceID.");
                        }
                        keyManager.updateRegisteredResource(api, registeredResource);
                    }
                } catch (APIManagementException e) {
                    // Per-KM failures are logged so one broken Key Manager does not fail the whole operation.
                    log.error("API Resource Registration failed in Key Manager " + keyManagerDtoEntry.getKey(), e);
                }
            }
        }
    }

    /**
     * Validates the name and version of api against illegal characters.
     *
     * @param api API info object
     * @throws APIManagementException if the name/version is missing, contains illegal characters,
     *                                or any field exceeds its maximum length
     */
    private void validateApiInfo(API api) throws APIManagementException {

        String apiName = api.getId().getApiName();
        String apiVersion = api.getId().getVersion();
        if (apiName == null) {
            handleException("API Name is required.");
        } else if (containsIllegals(apiName)) {
            handleException("API Name contains one or more illegal characters " +
                    "( " + APIConstants.REGEX_ILLEGAL_CHARACTERS_FOR_API_METADATA + " )");
        }
        if (apiVersion == null) {
            handleException("API Version is required.");
        } else if (containsIllegals(apiVersion)) {
            handleException("API Version contains one or more illegal characters " +
                    "( " + APIConstants.REGEX_ILLEGAL_CHARACTERS_FOR_API_METADATA + " )");
        }
        // Length limits are enforced together; any violation aborts with a dedicated error code.
        if (!hasValidLength(apiName, APIConstants.MAX_LENGTH_API_NAME)
                || !hasValidLength(apiVersion, APIConstants.MAX_LENGTH_VERSION)
                || !hasValidLength(api.getId().getProviderName(), APIConstants.MAX_LENGTH_PROVIDER)
                || !hasValidLength(api.getContext(), APIConstants.MAX_LENGTH_CONTEXT)) {
            throw new APIManagementException("Character length exceeds the allowable limit",
                    ExceptionCodes.LENGTH_EXCEEDS);
        }
    }

    /**
     * Delete the subscription block condition matching the given condition value, if one exists.
     */
    public void deleteSubscriptionBlockCondition(String conditionValue) throws APIManagementException {

        BlockConditionsDTO blockCondition = apiMgtDAO.getSubscriptionBlockCondition(conditionValue, tenantDomain);
        if (blockCondition != null) {
            deleteBlockConditionByUUID(blockCondition.getUUID());
        }
    }

    /**
     * This method is used to get the context of API identified by the given uuid
     *
     * @param uuid api uuid
     * @return
apiContext
     * @throws APIManagementException if failed to fetch the context for api uuid
     */
    public String getAPIContext(String uuid) throws APIManagementException {
        return apiMgtDAO.getAPIContext(uuid);
    }

    /**
     * Check whether a string contains illegal characters
     *
     * @param toExamine string to examine for illegal characters
     * @return true if found illegal characters, else false
     */
    public boolean containsIllegals(String toExamine) {

        // NOTE(review): the Pattern is compiled on every call; it could be cached as a static final constant.
        Pattern pattern = Pattern.compile(APIConstants.REGEX_ILLEGAL_CHARACTERS_FOR_API_METADATA);
        Matcher matcher = pattern.matcher(toExamine);
        return matcher.find();
    }

    /**
     * Check whether the provided information exceeds the maximum length
     *
     * @param field     text field to validate
     * @param maxLength maximum allowed length
     * @return true if the length is valid
     */
    public boolean hasValidLength(String field, int maxLength) {
        return field.length() <= maxLength;
    }

    /**
     * Persist API Status into a property of API Registry resource
     *
     * @param artifactId API artifact ID
     * @param apiStatus  Current status of the API
     * @throws APIManagementException on error
     */
    private void saveAPIStatus(String artifactId, String apiStatus) throws APIManagementException {
        try {
            Resource resource = registry.get(artifactId);
            if (resource != null) {
                // Add the property the first time, update it on subsequent saves.
                String propValue = resource.getProperty(APIConstants.API_STATUS);
                if (propValue == null) {
                    resource.addProperty(APIConstants.API_STATUS, apiStatus);
                } else {
                    resource.setProperty(APIConstants.API_STATUS, apiStatus);
                }
                registry.put(artifactId, resource);
            }
        } catch (RegistryException e) {
            handleException("Error while adding API", e);
        }
    }

    /**
     * Returns the default version of the given API, or null on lookup failure.
     */
    @Override
    public String getDefaultVersion(APIIdentifier apiid) throws APIManagementException {

        String defaultVersion = null;
        try {
            defaultVersion = apiMgtDAO.getDefaultVersion(apiid);
        } catch (APIManagementException e) {
            handleException("Error while getting default version :" + apiid.getApiName(), e);
        }
        return defaultVersion;
    }

    /**
     * Returns the published default version of the given API, or null on lookup failure.
     */
    public String getPublishedDefaultVersion(APIIdentifier apiid) throws APIManagementException {

        String defaultVersion = null;
        try {
            defaultVersion = apiMgtDAO.getPublishedDefaultVersion(apiid);
        } catch (APIManagementException e) {
            handleException("Error while getting published default version :" + apiid.getApiName(), e);
        }
        return defaultVersion;
    }

    /**
     * This method is used to save the wsdl file in the registry
     * This is used when user starts api creation with a soap endpoint
     *
     * @param api api object
     * @throws APIManagementException if the registry transaction fails or the artifact manager is unavailable
     */
    public void updateWsdlFromUrl(API api) throws APIManagementException {

        boolean transactionCommitted = false;
        try {
            registry.beginTransaction();
            String apiArtifactId = registry.get(APIUtil.getAPIPath(api.getId())).getUUID();
            GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
            if (artifactManager == null) {
                String errorMessage = "Artifact manager is null when updating WSDL of API " + api.getId().getApiName();
                log.error(errorMessage);
                throw new APIManagementException(errorMessage);
            }
            GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
            GenericArtifact apiArtifact = APIUtil.createAPIArtifactContent(artifact, api);
            String artifactPath = GovernanceUtils.getArtifactPath(registry, apiArtifact.getId());
            // Only persist the WSDL when the URL passes validation; the transaction is committed either way.
            if (APIUtil.isValidWSDLURL(api.getWsdlUrl(), false)) {
                String path = APIUtil.createWSDL(registry, api);
                updateWSDLUriInAPIArtifact(path, artifactManager, apiArtifact, artifactPath);
            }
            registry.commitTransaction();
            transactionCommitted = true;
        } catch (RegistryException e) {
            try {
                registry.rollbackTransaction();
            } catch (RegistryException ex) {
                handleException("Error occurred while rolling back the transaction.", ex);
            }
            throw new APIManagementException("Error occurred while saving the wsdl in the registry.", e);
        } finally {
            // Safety net: roll back if we exited before committing (e.g. the artifact-manager null check threw).
            try {
                if (!transactionCommitted) {
                    registry.rollbackTransaction();
                }
            } catch (RegistryException ex) {
                handleException("Error occurred while rolling back the transaction.", ex);
            }
        }
    }

    /**
     * Saves an uploaded WSDL resource of the API into the registry inside a registry transaction.
     *
     * @param api api object carrying the WSDL resource to persist
     * @throws APIManagementException if rollback handling fails or the artifact manager is unavailable
     */
    public void updateWsdlFromResourceFile(API api) throws APIManagementException {

        boolean transactionCommitted = false;
        try {
            registry.beginTransaction();
            String apiArtifactId = registry.get(APIUtil.getAPIPath(api.getId())).getUUID();
            GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
            if (artifactManager == null) {
                String errorMessage = "Artifact manager is null when updating WSDL of API " + api.getId().getApiName();
                log.error(errorMessage);
                throw new APIManagementException(errorMessage);
            }
            GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
            GenericArtifact apiArtifact = APIUtil.createAPIArtifactContent(artifact, api);
            String artifactPath = GovernanceUtils.getArtifactPath(registry, apiArtifact.getId());
            // NOTE(review): the transaction is committed only when a WSDL resource is present; otherwise the
            // finally block rolls it back.
            if (api.getWsdlResource() != null) {
                String path = APIUtil.saveWSDLResource(registry, api);
                apiArtifact.setAttribute(APIConstants.API_OVERVIEW_WSDL, api.getWsdlUrl()); //reset the wsdl path
                artifactManager.updateGenericArtifact(apiArtifact); //update the artifact
                registry.commitTransaction();
                transactionCommitted = true;
            }
        } catch (RegistryException e) {
            try {
                registry.rollbackTransaction();
            } catch (RegistryException ex) {
                handleException("Error occurred while rolling back the transaction.", ex);
            }
            // NOTE(review): unlike updateWsdlFromUrl, the original RegistryException is swallowed here and the
            // method returns normally; confirm whether rethrowing as APIManagementException was intended.
        } finally {
            try {
                if (!transactionCommitted) {
                    registry.rollbackTransaction();
                }
            } catch (RegistryException ex) {
                handleException("Error occurred while rolling back the transaction.", ex);
            }
        }
    }

    /**
     * Checks whether the current user is allowed to update the given API based on its lifecycle state:
     * CREATED/PROTOTYPED requires create or publish permission; any later state requires publish permission.
     */
    public boolean isAPIUpdateValid(API api) throws APIManagementException {

        String apiSourcePath = APIUtil.getAPIPath(api.getId());
        boolean isValid = false;
        try {
            Resource apiSourceArtifact = registry.get(apiSourcePath);
            GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
            if (artifactManager == null) {
                String errorMessage = "Failed to retrieve artifact manager when checking validity of API update for "
                        + api.getId().getApiName();
                log.error(errorMessage);
                throw new
APIManagementException(errorMessage);
            }
            GenericArtifact artifact = artifactManager.getGenericArtifact(apiSourceArtifact.getUUID());
            String status = APIUtil.getLcStateFromArtifact(artifact);
            if (!APIConstants.CREATED.equals(status) && !APIConstants.PROTOTYPED.equals(status)) {
                //api at least is in published status
                if (APIUtil.hasPermission(getUserNameWithoutChange(), APIConstants.Permissions.API_PUBLISH)) {
                    //user has publish permission
                    isValid = true;
                }
            } else if (APIConstants.CREATED.equals(status) || APIConstants.PROTOTYPED.equals(status)) {
                //api in create status
                if (APIUtil.hasPermission(getUserNameWithoutChange(), APIConstants.Permissions.API_CREATE)
                        || APIUtil.hasPermission(getUserNameWithoutChange(), APIConstants.Permissions.API_PUBLISH)) {
                    //user has create or publish permission
                    isValid = true;
                }
            }
        } catch (RegistryException ex) {
            handleException("Error while validate user for API publishing", ex);
        }
        return isValid;
    }

    /**
     * Updates an existing API
     *
     * @param api API
     * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to update API
     * @throws org.wso2.carbon.apimgt.api.FaultGatewaysException on Gateway Failure
     */
    @Override
    public void updateAPI(API api) throws APIManagementException, FaultGatewaysException {

        boolean isValid = isAPIUpdateValid(api);
        if (!isValid) {
            throw new APIManagementException(" User doesn't have permission for update");
        }
        API oldApi = getAPIbyUUID(api.getUuid(), api.getOrganization());
        String organization = api.getOrganization();
        if (!oldApi.getStatus().equals(api.getStatus())) {
            // We don't allow API status updates via this method.
            // Use changeAPIStatus for that kind of updates.
            throw new APIManagementException("Invalid API update operation involving API status changes");
        }
        validateKeyManagers(api);
        Gson gson = new Gson();
        // Merge monetization properties: values left blank in the update are carried over from the old API.
        Map<String, String> oldMonetizationProperties =
                gson.fromJson(oldApi.getMonetizationProperties().toString(), HashMap.class);
        if (oldMonetizationProperties != null && !oldMonetizationProperties.isEmpty()) {
            Map<String, String> newMonetizationProperties =
                    gson.fromJson(api.getMonetizationProperties().toString(), HashMap.class);
            if (newMonetizationProperties != null) {
                for (Map.Entry<String, String> entry : oldMonetizationProperties.entrySet()) {
                    String newValue = newMonetizationProperties.get(entry.getKey());
                    if (StringUtils.isAllBlank(newValue)) {
                        newMonetizationProperties.put(entry.getKey(), entry.getValue());
                    }
                }
                JSONParser parser = new JSONParser();
                try {
                    JSONObject jsonObj = (JSONObject) parser.parse(gson.toJson(newMonetizationProperties));
                    api.setMonetizationProperties(jsonObj);
                } catch (ParseException e) {
                    throw new APIManagementException("Error when parsing monetization properties ", e);
                }
            }
        }
        // Remember the currently published default version before the update, to notify it later if it changes.
        String publishedDefaultVersion = getPublishedDefaultVersion(api.getId());
        //Update WSDL in the registry
        if (api.getWsdlUrl() != null && api.getWsdlResource() == null) {
            updateWsdlFromUrl(api);
        }
        if (api.getWsdlResource() != null) {
            updateWsdlFromResourceFile(api);
        }
        // Registry permissions need to be refreshed when access control, visibility, or their role lists changed.
        boolean updatePermissions = false;
        if (APIUtil.isAccessControlEnabled()) {
            if (!oldApi.getAccessControl().equals(api.getAccessControl())
                    || (APIConstants.API_RESTRICTED_VISIBILITY.equals(oldApi.getAccessControl())
                    && !api.getAccessControlRoles().equals(oldApi.getAccessControlRoles()))
                    || !oldApi.getVisibility().equals(api.getVisibility())
                    || (APIConstants.API_RESTRICTED_VISIBILITY.equals(oldApi.getVisibility())
                    && !api.getVisibleRoles().equals(oldApi.getVisibleRoles()))) {
                updatePermissions = true;
            }
        } else if (!oldApi.getVisibility().equals(api.getVisibility())
                || (APIConstants.API_RESTRICTED_VISIBILITY.equals(oldApi.getVisibility())
                && !api.getVisibleRoles().equals(oldApi.getVisibleRoles()))) {
            updatePermissions = true;
        }
        updateEndpointSecurity(oldApi, api);
        String apiUUid = updateApiArtifact(api, true, updatePermissions);
        api.setUuid(apiUUid);
        if (!oldApi.getContext().equals(api.getContext())) {
            api.setApiHeaderChanged(true);
        }
        int tenantId;
        String tenantDomain = MultitenantUtils
                .getTenantDomain(APIUtil.replaceEmailDomainBack(api.getId().getProviderName()));
        try {
            tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                    .getTenantId(tenantDomain);
        } catch (UserStoreException e) {
            throw new APIManagementException(
                    "Error in retrieving Tenant Information while updating api :" + api.getId().getApiName(), e);
        }
        validateResourceThrottlingTiers(api, tenantDomain);
        //get product resource mappings on API before updating the API. Update uri templates on api will remove all
        //product mappings as well.
        List<APIProductResource> productResources = apiMgtDAO.getProductMappingsForAPI(api);
        updateAPI(api, tenantId, userNameWithoutChange);
        updateProductResourceMappings(api, organization, productResources);
        if (log.isDebugEnabled()) {
            log.debug("Successfully updated the API: " + api.getId() + " in the database");
        }
        JSONObject apiLogObject = new JSONObject();
        apiLogObject.put(APIConstants.AuditLogConstants.NAME, api.getId().getApiName());
        apiLogObject.put(APIConstants.AuditLogConstants.CONTEXT, api.getContext());
        apiLogObject.put(APIConstants.AuditLogConstants.VERSION, api.getId().getVersion());
        apiLogObject.put(APIConstants.AuditLogConstants.PROVIDER, api.getId().getProviderName());
        APIUtil.logAuditMessage(APIConstants.AuditLogConstants.API, apiLogObject.toString(),
                APIConstants.AuditLogConstants.UPDATED, this.username);
        //update doc visibility
        List<Documentation> docsList = getAllDocumentation(api.getId());
        if (docsList != null) {
            Iterator it = docsList.iterator();
            while (it.hasNext()) {
                Object docsObject = it.next();
                Documentation docs = (Documentation) docsObject;
                updateDocVisibility(api, docs);
            }
        }
        //notify key manager with API update
        registerOrUpdateResourceInKeyManager(api, tenantDomain);
        int apiId = apiMgtDAO.getAPIID(api.getUuid());
        // If this API is now the published default and differs from the previous default, notify the old one.
        if (publishedDefaultVersion != null) {
            if (api.isPublishedDefaultVersion() && !api.getId().getVersion().equals(publishedDefaultVersion)) {
                APIIdentifier previousDefaultVersionIdentifier = new APIIdentifier(api.getId().getProviderName(),
                        api.getId().getApiName(), publishedDefaultVersion);
                sendUpdateEventToPreviousDefaultVersion(previousDefaultVersionIdentifier, organization);
            }
        }
        APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(),
                APIConstants.EventType.API_UPDATE.name(), tenantId, tenantDomain, api.getId().getApiName(), apiId,
                api.getUuid(), api.getId().getVersion(), api.getType(), api.getContext(),
                APIUtil.replaceEmailDomainBack(api.getId().getProviderName()), api.getStatus());
        APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name());
        // Extracting API details for the recommendation system
        if (recommendationEnvironment != null) {
            // NOTE(review): the extractor is created with APIConstants.ADD_API even though this is an update path;
            // confirm whether that constant is intentional here.
            RecommenderEventPublisher extractor = new RecommenderDetailsExtractor(api, tenantDomain,
                    APIConstants.ADD_API);
            Thread recommendationThread = new Thread(extractor);
            recommendationThread.start();
        }
    }

    /**
     * Fires an API_UPDATE event targeting the previous published-default version of an API so that
     * downstream components refresh their view of which version is the default.
     */
    private void sendUpdateEventToPreviousDefaultVersion(APIIdentifier apiIdentifier, String organization)
            throws APIManagementException {

        API api = apiMgtDAO.getLightWeightAPIInfoByAPIIdentifier(apiIdentifier, organization);
        APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(),
                APIConstants.EventType.API_UPDATE.name(), tenantId, tenantDomain, apiIdentifier.getApiName(),
                api.getId().getId(), api.getUuid(), api.getId().getVersion(), api.getType(), api.getContext(),
                APIUtil.replaceEmailDomainBack(api.getId().getProviderName()), api.getStatus());
        APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name());
    }

    public API updateAPI(API api, API existingAPI) throws
APIManagementException {

        // Status transitions are not allowed through this update path (use the lifecycle-change APIs instead).
        if (!existingAPI.getStatus().equals(api.getStatus())) {
            throw new APIManagementException("Invalid API update operation involving API status changes");
        }
        String tenantDomain = MultitenantUtils
                .getTenantDomain(APIUtil.replaceEmailDomainBack(api.getId().getProviderName()));
        //Validate Transports
        validateAndSetTransports(api);
        validateAndSetAPISecurity(api);
        validateKeyManagers(api);
        String publishedDefaultVersion = getPublishedDefaultVersion(api.getId());
        Gson gson = new Gson();
        String organization = api.getOrganization();
        // Merge monetization properties: blank values in the update inherit the existing API's values.
        Map<String, String> oldMonetizationProperties =
                gson.fromJson(existingAPI.getMonetizationProperties().toString(), HashMap.class);
        if (oldMonetizationProperties != null && !oldMonetizationProperties.isEmpty()) {
            Map<String, String> newMonetizationProperties =
                    gson.fromJson(api.getMonetizationProperties().toString(), HashMap.class);
            if (newMonetizationProperties != null) {
                for (Map.Entry<String, String> entry : oldMonetizationProperties.entrySet()) {
                    String newValue = newMonetizationProperties.get(entry.getKey());
                    if (StringUtils.isAllBlank(newValue)) {
                        newMonetizationProperties.put(entry.getKey(), entry.getValue());
                    }
                }
                JSONParser parser = new JSONParser();
                try {
                    JSONObject jsonObj = (JSONObject) parser.parse(gson.toJson(newMonetizationProperties));
                    api.setMonetizationProperties(jsonObj);
                } catch (ParseException e) {
                    throw new APIManagementException("Error when parsing monetization properties ", e);
                }
            }
        }
        updateEndpointSecurity(existingAPI, api);
        if (!existingAPI.getContext().equals(api.getContext())) {
            api.setApiHeaderChanged(true);
        }
        int tenantId = APIUtil.getInternalOrganizationId(organization);
        validateResourceThrottlingTiers(api, tenantDomain);
        //get product resource mappings on API before updating the API. Update uri templates on api will remove all
        //product mappings as well.
        List<APIProductResource> productResources = apiMgtDAO.getProductMappingsForAPI(api);
        updateAPI(api, tenantId, userNameWithoutChange);
        updateProductResourceMappings(api, organization, productResources);
        if (log.isDebugEnabled()) {
            log.debug("Successfully updated the API: " + api.getId() + " in the database");
        }
        JSONObject apiLogObject = new JSONObject();
        apiLogObject.put(APIConstants.AuditLogConstants.NAME, api.getId().getApiName());
        apiLogObject.put(APIConstants.AuditLogConstants.CONTEXT, api.getContext());
        apiLogObject.put(APIConstants.AuditLogConstants.VERSION, api.getId().getVersion());
        apiLogObject.put(APIConstants.AuditLogConstants.PROVIDER, api.getId().getProviderName());
        loadMediationPoliciesToAPI(api, tenantDomain);
        try {
            api.setCreatedTime(existingAPI.getCreatedTime());
            apiPersistenceInstance.updateAPI(new Organization(organization), APIMapper.INSTANCE.toPublisherApi(api));
        } catch (APIPersistenceException e) {
            throw new APIManagementException("Error while updating API details", e);
        }
        APIUtil.logAuditMessage(APIConstants.AuditLogConstants.API, apiLogObject.toString(),
                APIConstants.AuditLogConstants.UPDATED, this.username);
        //Validate Transports
        // NOTE(review): transports/security were already validated near the top of this method, and the
        // persistence update below repeats the identical apiPersistenceInstance.updateAPI call performed a few
        // lines above — this looks like duplicated code; confirm before removing.
        validateAndSetTransports(api);
        validateAndSetAPISecurity(api);
        try {
            api.setCreatedTime(existingAPI.getCreatedTime());
            apiPersistenceInstance.updateAPI(new Organization(organization), APIMapper.INSTANCE.toPublisherApi(api));
        } catch (APIPersistenceException e) {
            throw new APIManagementException("Error while updating API details", e);
        }
        //notify key manager with API update
        registerOrUpdateResourceInKeyManager(api, tenantDomain);
        int apiId = apiMgtDAO.getAPIID(api.getUuid());
        // If this API is now the published default and differs from the previous default, notify the old one.
        if (publishedDefaultVersion != null) {
            if (api.isPublishedDefaultVersion() && !api.getId().getVersion().equals(publishedDefaultVersion)) {
                APIIdentifier previousDefaultVersionIdentifier = new APIIdentifier(api.getId().getProviderName(),
                        api.getId().getApiName(), publishedDefaultVersion);
                sendUpdateEventToPreviousDefaultVersion(previousDefaultVersionIdentifier, organization);
            }
        }
        APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(),
                APIConstants.EventType.API_UPDATE.name(), tenantId, tenantDomain, api.getId().getApiName(), apiId,
                api.getUuid(), api.getId().getVersion(), api.getType(), api.getContext(),
                APIUtil.replaceEmailDomainBack(api.getId().getProviderName()), api.getStatus());
        APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name());
        // Extracting API details for the recommendation system
        if (recommendationEnvironment != null) {
            RecommenderEventPublisher extractor = new RecommenderDetailsExtractor(api, tenantDomain,
                    APIConstants.ADD_API);
            Thread recommendationThread = new Thread(extractor);
            recommendationThread.start();
        }
        return api;
    }

    /**
     * Re-applies registry permissions on the API's documentation resources when access control or
     * visibility settings changed between the old and updated API.
     */
    private void updateDocumentPermissions(API api, API oldApi) throws APIManagementException {

        boolean updatePermissions = false;
        if (APIUtil.isAccessControlEnabled()) {
            if (!oldApi.getAccessControl().equals(api.getAccessControl())
                    || (APIConstants.API_RESTRICTED_VISIBILITY.equals(oldApi.getAccessControl())
                    && !api.getAccessControlRoles().equals(oldApi.getAccessControlRoles()))
                    || !oldApi.getVisibility().equals(api.getVisibility())
                    || (APIConstants.API_RESTRICTED_VISIBILITY.equals(oldApi.getVisibility())
                    && !api.getVisibleRoles().equals(oldApi.getVisibleRoles()))) {
                updatePermissions = true;
            }
        } else if (!oldApi.getVisibility().equals(api.getVisibility())
                || (APIConstants.API_RESTRICTED_VISIBILITY.equals(oldApi.getVisibility())
                && !api.getVisibleRoles().equals(oldApi.getVisibleRoles()))) {
            updatePermissions = true;
        }
        String visibleRolesList = api.getVisibleRoles();
        String[] visibleRoles = new String[0];
        if (visibleRolesList != null) {
            visibleRoles = visibleRolesList.split(",");
        }
        //TODO check if registry.beginTransaction(); flow is needed
        List<Documentation> docs = getAllDocumentation(api.getId());
        if (updatePermissions) {
            APIManagerConfiguration config =
getAPIManagerConfiguration();
            boolean isSetDocLevelPermissions = Boolean.parseBoolean(
                    config.getFirstProperty(APIConstants.API_PUBLISHER_ENABLE_API_DOC_VISIBILITY_LEVELS));
            String docRootPath = APIUtil.getAPIDocPath(api.getId());
            if (isSetDocLevelPermissions) {
                // Retain the docs
                for (Documentation doc : docs) {
                    if ((APIConstants.DOC_API_BASED_VISIBILITY).equalsIgnoreCase(doc.getVisibility().name())) {
                        String documentationPath = APIUtil.getAPIDocPath(api.getId()) + doc.getName();
                        APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                                visibleRoles, documentationPath, registry);
                        // Permission updates also apply to the doc's content resource, whose path depends on source type.
                        if (Documentation.DocumentSourceType.INLINE.equals(doc.getSourceType())
                                || Documentation.DocumentSourceType.MARKDOWN.equals(doc.getSourceType())) {
                            String contentPath = APIUtil.getAPIDocContentPath(api.getId(), doc.getName());
                            APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                                    visibleRoles, contentPath, registry);
                        } else if (Documentation.DocumentSourceType.FILE.equals(doc.getSourceType())
                                && doc.getFilePath() != null) {
                            String filePath = APIUtil.getDocumentationFilePath(api.getId(), doc.getFilePath()
                                    .split("files" + RegistryConstants.PATH_SEPARATOR)[1]);
                            APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                                    visibleRoles, filePath, registry);
                        }
                    }
                }
            } else {
                // Doc-level permissions disabled: apply the API's permissions to the whole doc tree at once.
                APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles,
                        docRootPath, registry);
            }
        } else {
            //In order to support content search feature - we need to update resource permissions of document resources
            //if their visibility is set to API level.
            if (docs != null) {
                for (Documentation doc : docs) {
                    if ((APIConstants.DOC_API_BASED_VISIBILITY).equalsIgnoreCase(doc.getVisibility().name())) {
                        String documentationPath = APIUtil.getAPIDocPath(api.getId()) + doc.getName();
                        APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                                visibleRoles, documentationPath, registry);
                    }
                }
            }
        }
        //update doc visibility
        if (docs != null) {
            Iterator it = docs.iterator();
            while (it.hasNext()) {
                Object docsObject = it.next();
                Documentation doc = (Documentation) docsObject;
                updateDocVisibility(api, doc);
            }
        }
    }

    /**
     * Verifies that every Key Manager referenced by the API is actually configured for the tenant;
     * throws KEY_MANAGER_NOT_REGISTERED listing any missing ones.
     */
    private void validateKeyManagers(API api) throws APIManagementException {

        List<KeyManagerConfigurationDTO> keyManagerConfigurationsByTenant =
                apiMgtDAO.getKeyManagerConfigurationsByOrganization(tenantDomain);
        List<String> configuredMissingKeyManagers = new ArrayList<>();
        for (String keyManager : api.getKeyManagers()) {
            // The "all key managers" sentinel never needs to match a concrete configuration.
            if (!APIConstants.KeyManager.API_LEVEL_ALL_KEY_MANAGERS.equals(keyManager)) {
                KeyManagerConfigurationDTO selectedKeyManager = null;
                for (KeyManagerConfigurationDTO keyManagerConfigurationDTO : keyManagerConfigurationsByTenant) {
                    if (keyManager.equals(keyManagerConfigurationDTO.getName())) {
                        selectedKeyManager = keyManagerConfigurationDTO;
                        break;
                    }
                }
                if (selectedKeyManager == null) {
                    configuredMissingKeyManagers.add(keyManager);
                }
            }
        }
        if (!configuredMissingKeyManagers.isEmpty()) {
            throw new APIManagementException(
                    "Key Manager(s) Not found :" + String.join(" , ", configuredMissingKeyManagers),
                    ExceptionCodes.KEY_MANAGER_NOT_REGISTERED);
        }
    }

    /**
     * Update API metadata and resources.
     *
     * @param api      API to update
     * @param tenantId Tenant Id
     * @param username Username of the user who is updating
     * @throws APIManagementException If fails to update API.
     */
    private void updateAPI(API api, int tenantId, String username) throws APIManagementException {

        apiMgtDAO.updateAPI(api, username);
        if (log.isDebugEnabled()) {
            log.debug("Successfully updated the API: " + api.getId() + " metadata in the database");
        }
        updateAPIResources(api, tenantId);
    }

    /**
     * Update resources of the API including local scopes and resource to scope attachments.
     *
     * @param api      API
     * @param tenantId Tenant Id
     * @throws APIManagementException If fails to update local scopes of the API.
     */
    private void updateAPIResources(API api, int tenantId) throws APIManagementException {

        String tenantDomain = APIUtil.getTenantDomainFromTenantId(tenantId);
        APIIdentifier apiIdentifier = api.getId();
        // Get the new URI templates for the API
        Set<URITemplate> uriTemplates = api.getUriTemplates();
        // Get the existing local scope keys attached for the API
        Set<String> oldLocalScopeKeys = apiMgtDAO.getAllLocalScopeKeysForAPI(api.getUuid(), tenantId);
        // Get the existing URI templates for the API
        Set<URITemplate> oldURITemplates = apiMgtDAO.getURITemplatesOfAPI(api.getUuid());
        // Get the new local scope keys from URI templates
        Set<Scope> newLocalScopes = getScopesToRegisterFromURITemplates(api.getUuid(), api.getOrganization(),
                uriTemplates);
        Set<String> newLocalScopeKeys = newLocalScopes.stream().map(Scope::getKey).collect(Collectors.toSet());
        // Get the existing versioned local scope keys attached for the API
        Set<String> oldVersionedLocalScopeKeys = apiMgtDAO.getVersionedLocalScopeKeysForAPI(api.getUuid(), tenantId);
        // Get the existing versioned local scope keys which needs to be removed (not updated) from the current updating
        // API and remove them from the oldLocalScopeKeys set before sending to KM, so that they will not be removed
        // from KM and can be still used by other versioned APIs.
        Iterator oldLocalScopesItr = oldLocalScopeKeys.iterator();
        while (oldLocalScopesItr.hasNext()) {
            String oldLocalScopeKey = (String) oldLocalScopesItr.next();
            // if the scope is used in versioned APIs and it is not in new local scope key set
            if (oldVersionedLocalScopeKeys.contains(oldLocalScopeKey)
                    && !newLocalScopeKeys.contains(oldLocalScopeKey)) {
                //remove from old local scope key set which will be send to KM
                oldLocalScopesItr.remove();
            }
        }
        apiMgtDAO.updateURITemplates(api, tenantId);
        if (log.isDebugEnabled()) {
            log.debug("Successfully updated the URI templates of API: " + apiIdentifier + " in the database");
        }
        // Update the resource scopes of the API in KM.
        // Need to remove the old local scopes and register new local scopes and, update the resource scope mappings
        // using the updated URI templates of the API.
        deleteScopes(oldLocalScopeKeys, tenantId);
        addScopes(newLocalScopes, tenantId);
        Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain);
        for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) {
            KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager();
            if (keyManager != null) {
                try {
                    keyManager.updateResourceScopes(api, oldLocalScopeKeys, newLocalScopes, oldURITemplates,
                            uriTemplates);
                    if (log.isDebugEnabled()) {
                        log.debug("Successfully updated the resource scopes of API: " + apiIdentifier +
                                " in Key Manager "+ keyManagerDtoEntry.getKey()+" .");
                    }
                } catch (APIManagementException e) {
                    // Per-KM failures are logged so a single broken Key Manager does not abort the update.
                    log.error("Error while updating resource to scope attachment in Key Manager " +
                            keyManagerDtoEntry.getKey(), e);
                }
            }
        }
    }

    /**
     * Carries forward endpoint security credentials from the old API when the update omits them
     * (blank password), covering both legacy UT security and per-environment endpoint security config.
     */
    private void updateEndpointSecurity(API oldApi, API api) throws APIManagementException {
        try {
            // Legacy endpoint security: keep the old username/password when the update left the password blank.
            if (api.isEndpointSecured() && StringUtils.isBlank(api.getEndpointUTPassword())
                    && !StringUtils.isBlank(oldApi.getEndpointUTPassword())) {
                if (log.isDebugEnabled()) {
                    log.debug("Given endpoint security password is empty");
                }
            // Retain the previously stored credentials on the updated API.
            api.setEndpointUTUsername(oldApi.getEndpointUTUsername());
            api.setEndpointUTPassword(oldApi.getEndpointUTPassword());
            if (log.isDebugEnabled()) {
                log.debug("Using the previous username and password for endpoint security");
            }
        } else {
            // Endpoint security defined inside the endpoint configuration JSON: merge the stored
            // production/sandbox credential blocks into the incoming config where passwords are blank.
            String endpointConfig = api.getEndpointConfig();
            String oldEndpointConfig = oldApi.getEndpointConfig();
            if (StringUtils.isNotEmpty(endpointConfig) && StringUtils.isNotEmpty(oldEndpointConfig)) {
                JSONObject endpointConfigJson = (JSONObject) new JSONParser().parse(endpointConfig);
                JSONObject oldEndpointConfigJson = (JSONObject) new JSONParser().parse(oldEndpointConfig);
                if ((endpointConfigJson.get(APIConstants.ENDPOINT_SECURITY) != null)
                        && (oldEndpointConfigJson.get(APIConstants.ENDPOINT_SECURITY) != null)) {
                    JSONObject endpointSecurityJson =
                            (JSONObject) endpointConfigJson.get(APIConstants.ENDPOINT_SECURITY);
                    JSONObject oldEndpointSecurityJson =
                            (JSONObject) oldEndpointConfigJson.get(APIConstants.ENDPOINT_SECURITY);
                    // --- production endpoint security ---
                    if (endpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_PRODUCTION) != null) {
                        if (oldEndpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_PRODUCTION) != null) {
                            EndpointSecurity endpointSecurity = new ObjectMapper().convertValue(
                                    endpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_PRODUCTION),
                                    EndpointSecurity.class);
                            EndpointSecurity oldEndpointSecurity = new ObjectMapper().convertValue(
                                    oldEndpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_PRODUCTION),
                                    EndpointSecurity.class);
                            // Only merge when security is enabled on both sides and the incoming
                            // password is blank (i.e. the caller did not re-enter the secret).
                            if (endpointSecurity.isEnabled() && oldEndpointSecurity.isEnabled()
                                    && StringUtils.isBlank(endpointSecurity.getPassword())) {
                                endpointSecurity.setUsername(oldEndpointSecurity.getUsername());
                                endpointSecurity.setPassword(oldEndpointSecurity.getPassword());
                                // OAuth-secured endpoints also carry client credentials and grant data.
                                if (endpointSecurity.getType().equals(APIConstants.ENDPOINT_SECURITY_TYPE_OAUTH)) {
                                    endpointSecurity.setUniqueIdentifier(oldEndpointSecurity.getUniqueIdentifier());
                                    endpointSecurity.setGrantType(oldEndpointSecurity.getGrantType());
                                    endpointSecurity.setTokenUrl(oldEndpointSecurity.getTokenUrl());
                                    endpointSecurity.setClientId(oldEndpointSecurity.getClientId());
                                    endpointSecurity.setClientSecret(oldEndpointSecurity.getClientSecret());
                                    endpointSecurity.setCustomParameters(oldEndpointSecurity.getCustomParameters());
                                }
                            }
                            // Write the merged production security block back into the config JSON.
                            endpointSecurityJson.replace(APIConstants.ENDPOINT_SECURITY_PRODUCTION, new JSONParser()
                                    .parse(new ObjectMapper().writeValueAsString(endpointSecurity)));
                        }
                    }
                    // --- sandbox endpoint security (same merge as production) ---
                    if (endpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_SANDBOX) != null) {
                        if (oldEndpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_SANDBOX) != null) {
                            EndpointSecurity endpointSecurity = new ObjectMapper()
                                    .convertValue(endpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_SANDBOX),
                                            EndpointSecurity.class);
                            EndpointSecurity oldEndpointSecurity = new ObjectMapper()
                                    .convertValue(oldEndpointSecurityJson.get(APIConstants.ENDPOINT_SECURITY_SANDBOX),
                                            EndpointSecurity.class);
                            if (endpointSecurity.isEnabled() && oldEndpointSecurity.isEnabled()
                                    && StringUtils.isBlank(endpointSecurity.getPassword())) {
                                endpointSecurity.setUsername(oldEndpointSecurity.getUsername());
                                endpointSecurity.setPassword(oldEndpointSecurity.getPassword());
                                if (endpointSecurity.getType().equals(APIConstants.ENDPOINT_SECURITY_TYPE_OAUTH)) {
                                    endpointSecurity.setUniqueIdentifier(oldEndpointSecurity.getUniqueIdentifier());
                                    endpointSecurity.setGrantType(oldEndpointSecurity.getGrantType());
                                    endpointSecurity.setTokenUrl(oldEndpointSecurity.getTokenUrl());
                                    endpointSecurity.setClientId(oldEndpointSecurity.getClientId());
                                    endpointSecurity.setClientSecret(oldEndpointSecurity.getClientSecret());
                                    endpointSecurity.setCustomParameters(oldEndpointSecurity.getCustomParameters());
                                }
                            }
                            endpointSecurityJson.replace(APIConstants.ENDPOINT_SECURITY_SANDBOX, new JSONParser()
                                    .parse(new ObjectMapper().writeValueAsString(endpointSecurity)));
                        }
                        // NOTE(review): this replace runs only inside the sandbox branch — presumably it should
                        // also run when only production security is present; confirm before changing.
                        endpointConfigJson.replace(APIConstants.ENDPOINT_SECURITY,endpointSecurityJson);
                    }
                }
api.setEndpointConfig(endpointConfigJson.toJSONString()); } } } catch (ParseException | JsonProcessingException e) { throw new APIManagementException( "Error while processing endpoint security for API " + api.getId().toString(), e); } } private String updateApiArtifact(API api, boolean updateMetadata, boolean updatePermissions) throws APIManagementException { //Validate Transports validateAndSetTransports(api); validateAndSetAPISecurity(api); boolean transactionCommitted = false; String apiUUID = null; try { registry.beginTransaction(); String apiArtifactId = registry.get(APIUtil.getAPIPath(api.getId())).getUUID(); GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId); if (artifactManager == null) { String errorMessage = "Artifact manager is null when updating API artifact ID " + api.getId(); log.error(errorMessage); throw new APIManagementException(errorMessage); } String oldStatus = artifact.getAttribute(APIConstants.API_OVERVIEW_STATUS); Resource apiResource = registry.get(artifact.getPath()); String oldAccessControlRoles = api.getAccessControlRoles(); if (apiResource != null) { oldAccessControlRoles = registry.get(artifact.getPath()).getProperty(APIConstants.PUBLISHER_ROLES); } GenericArtifact updateApiArtifact = APIUtil.createAPIArtifactContent(artifact, api); String artifactPath = GovernanceUtils.getArtifactPath(registry, updateApiArtifact.getId()); org.wso2.carbon.registry.core.Tag[] oldTags = registry.getTags(artifactPath); if (oldTags != null) { for (org.wso2.carbon.registry.core.Tag tag : oldTags) { registry.removeTag(artifactPath, tag.getTagName()); } } Set<String> tagSet = api.getTags(); if (tagSet != null) { for (String tag : tagSet) { registry.applyTag(artifactPath, tag); } } if (updateMetadata && api.getEndpointConfig() != null && !api.getEndpointConfig().isEmpty()) { // If WSDL URL get change only we update registry WSDL 
resource. If its registry resource patch we // will skip registry update. Only if this API created with WSDL end point type we need to update // wsdls for each update. //check for wsdl endpoint org.json.JSONObject response1 = new org.json.JSONObject(api.getEndpointConfig()); boolean isWSAPI = APIConstants.APITransportType.WS.toString().equals(api.getType()); String wsdlURL; if (!APIUtil.isStreamingApi(api) && "wsdl".equalsIgnoreCase(response1.get("endpoint_type").toString()) && response1.has("production_endpoints")) { wsdlURL = response1.getJSONObject("production_endpoints").get("url").toString(); if (APIUtil.isValidWSDLURL(wsdlURL, true)) { String path = APIUtil.createWSDL(registry, api); if (path != null) { // reset the wsdl path to permlink updateApiArtifact.setAttribute(APIConstants.API_OVERVIEW_WSDL, api.getWsdlUrl()); } } } } artifactManager.updateGenericArtifact(updateApiArtifact); //write API Status to a separate property. This is done to support querying APIs using custom query (SQL) //to gain performance String apiStatus = api.getStatus().toUpperCase(); saveAPIStatus(artifactPath, apiStatus); String[] visibleRoles = new String[0]; String publisherAccessControlRoles = api.getAccessControlRoles(); updateRegistryResources(artifactPath, publisherAccessControlRoles, api.getAccessControl(), api.getAdditionalProperties()); //propagate api status change and access control roles change to document artifact String newStatus = updateApiArtifact.getAttribute(APIConstants.API_OVERVIEW_STATUS); if (!StringUtils.equals(oldStatus, newStatus) || !StringUtils.equals(oldAccessControlRoles, publisherAccessControlRoles)) { APIUtil.notifyAPIStateChangeToAssociatedDocuments(artifact, registry); } if (updatePermissions) { APIUtil.clearResourcePermissions(artifactPath, api.getId(), ((UserRegistry) registry).getTenantId()); String visibleRolesList = api.getVisibleRoles(); if (visibleRolesList != null) { visibleRoles = visibleRolesList.split(","); } 
            // Apply the visibility roles to the API artifact itself.
            APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles,
                    artifactPath, registry);
        }
        //attaching api categories to the API
        List<APICategory> attachedApiCategories = api.getApiCategories();
        artifact.removeAttribute(APIConstants.API_CATEGORIES_CATEGORY_NAME);
        if (attachedApiCategories != null) {
            for (APICategory category : attachedApiCategories) {
                artifact.addAttribute(APIConstants.API_CATEGORIES_CATEGORY_NAME, category.getName());
            }
        }
        registry.commitTransaction();
        transactionCommitted = true;
        apiUUID = updateApiArtifact.getId();
        // Post-commit: re-apply permissions to the API's documentation resources.
        if (updatePermissions) {
            APIManagerConfiguration config = getAPIManagerConfiguration();
            boolean isSetDocLevelPermissions = Boolean.parseBoolean(
                    config.getFirstProperty(APIConstants.API_PUBLISHER_ENABLE_API_DOC_VISIBILITY_LEVELS));
            String docRootPath = APIUtil.getAPIDocPath(api.getId());
            if (isSetDocLevelPermissions) {
                // Retain the docs
                List<Documentation> docs = getAllDocumentation(api.getId());
                for (Documentation doc : docs) {
                    // Only documents that inherit the API's visibility get the API's permissions.
                    if ((APIConstants.DOC_API_BASED_VISIBILITY).equalsIgnoreCase(doc.getVisibility().name())) {
                        String documentationPath = APIUtil.getAPIDocPath(api.getId()) + doc.getName();
                        APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                                visibleRoles, documentationPath, registry);
                        if (Documentation.DocumentSourceType.INLINE.equals(doc.getSourceType())
                                || Documentation.DocumentSourceType.MARKDOWN.equals(doc.getSourceType())) {
                            // Inline/markdown docs store their content at a separate registry path.
                            String contentPath = APIUtil.getAPIDocContentPath(api.getId(), doc.getName());
                            APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                                    visibleRoles, contentPath, registry);
                        } else if (Documentation.DocumentSourceType.FILE.equals(doc.getSourceType())
                                && doc.getFilePath() != null) {
                            // File docs: derive the registry file path from the stored file path suffix.
                            String filePath = APIUtil.getDocumentationFilePath(api.getId(), doc.getFilePath()
                                    .split("files" + RegistryConstants.PATH_SEPARATOR)[1]);
                            APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                                    visibleRoles,
                                    filePath, registry);
                        }
                    }
                }
            } else {
                // Doc-level permissions disabled: apply the API's permissions to the doc root only.
                APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles,
                        docRootPath, registry);
            }
        } else {
            //In order to support content search feature - we need to update resource permissions of document resources
            //if their visibility is set to API level.
            List<Documentation> docs = getAllDocumentation(api.getId());
            if (docs != null) {
                for (Documentation doc : docs) {
                    if ((APIConstants.DOC_API_BASED_VISIBILITY).equalsIgnoreCase(doc.getVisibility().name())) {
                        String documentationPath = APIUtil.getAPIDocPath(api.getId()) + doc.getName();
                        APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                                visibleRoles, documentationPath, registry);
                    }
                }
            }
        }
    } catch (Exception e) {
        try {
            registry.rollbackTransaction();
        } catch (RegistryException re) {
            // Throwing an error from this level will mask the original exception
            log.error("Error while rolling back the transaction for API: " + api.getId().getApiName(), re);
        }
        handleException("Error while performing registry transaction operation", e);
    } finally {
        try {
            // Safety net: roll back if we never reached commit.
            if (!transactionCommitted) {
                registry.rollbackTransaction();
            }
        } catch (RegistryException ex) {
            handleException("Error occurred while rolling back the transaction.", ex);
        }
    }
    return apiUUID;
}

// NOTE(review): method name typo "propergate" (propagate) is part of the public interface; left as-is.
// Updates the API's status in-memory (and loads mediation policies) inside the owning tenant's flow.
// The returned map of failed gateways is always empty in this implementation.
public Map<String, String> propergateAPIStatusChangeToGateways(String newStatus, API api)
        throws APIManagementException {
    Map<String, String> failedGateways = new HashMap<String, String>();
    APIIdentifier identifier = api.getId();
    String providerTenantMode = identifier.getProviderName();
    String name = identifier.getApiName();
    String version = identifier.getVersion();
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerTenantMode));
        if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
            // Switch into the API owner's tenant for the duration of the update.
            isTenantFlowStarted = true;
            PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } if (api != null) { String currentStatus = api.getStatus(); if (APIConstants.PUBLISHED.equals(newStatus) || !currentStatus.equals(newStatus)) { api.setStatus(newStatus); APIManagerConfiguration config = getAPIManagerConfiguration(); String gatewayType = config.getFirstProperty(APIConstants.API_GATEWAY_TYPE); api.setAsPublishedDefaultVersion(api.getId().getVersion() .equals(apiMgtDAO.getPublishedDefaultVersion(api.getId()))); loadMediationPoliciesToAPI(api, tenantDomain); } } else { handleException("Couldn't find an API with the name-" + name + "version-" + version); } } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } return failedGateways; } private void loadMediationPoliciesToAPI(API api, String organization) throws APIManagementException { if (APIUtil.isSequenceDefined(api.getInSequence()) || APIUtil.isSequenceDefined(api.getOutSequence()) || APIUtil.isSequenceDefined(api.getFaultSequence())) { Organization org = new Organization(organization); String apiUUID = api.getUuid(); // get all policies try { List<MediationInfo> localPolicies = apiPersistenceInstance.getAllMediationPolicies(org, apiUUID); List<Mediation> globalPolicies = null; if (APIUtil.isSequenceDefined(api.getInSequence())) { boolean found = false; for (MediationInfo mediationInfo : localPolicies) { if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN.equals(mediationInfo.getType()) && api.getInSequence().equals(mediationInfo.getName())) { org.wso2.carbon.apimgt.persistence.dto.Mediation mediationPolicy = apiPersistenceInstance .getMediationPolicy(org, apiUUID, mediationInfo.getId()); Mediation mediation = new Mediation(); mediation.setConfig(mediationPolicy.getConfig()); mediation.setName(mediationPolicy.getName()); mediation.setUuid(mediationPolicy.getId()); mediation.setType(APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN); mediation.setGlobal(false); 
                        api.setInSequenceMediation(mediation);
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    // global policy
                    if (globalPolicies == null) {
                        globalPolicies = getAllGlobalMediationPolicies();
                    }
                    for (Mediation m : globalPolicies) {
                        if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN.equals(m.getType())
                                && api.getInSequence().equals(m.getName())) {
                            Mediation mediation = getGlobalMediationPolicy(m.getUuid());
                            mediation.setGlobal(true);
                            api.setInSequenceMediation(mediation);
                            found = true;
                            break;
                        }
                    }
                }
            }
            // Out sequence: same local-then-global resolution as the in sequence above.
            if (APIUtil.isSequenceDefined(api.getOutSequence())) {
                boolean found = false;
                for (MediationInfo mediationInfo : localPolicies) {
                    if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT.equals(mediationInfo.getType())
                            && api.getOutSequence().equals(mediationInfo.getName())) {
                        org.wso2.carbon.apimgt.persistence.dto.Mediation mediationPolicy = apiPersistenceInstance
                                .getMediationPolicy(org, apiUUID, mediationInfo.getId());
                        Mediation mediation = new Mediation();
                        mediation.setConfig(mediationPolicy.getConfig());
                        mediation.setName(mediationPolicy.getName());
                        mediation.setUuid(mediationPolicy.getId());
                        mediation.setType(APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT);
                        mediation.setGlobal(false);
                        api.setOutSequenceMediation(mediation);
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    // global policy
                    if (globalPolicies == null) {
                        globalPolicies = getAllGlobalMediationPolicies();
                    }
                    for (Mediation m : globalPolicies) {
                        if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT.equals(m.getType())
                                && api.getOutSequence().equals(m.getName())) {
                            Mediation mediation = getGlobalMediationPolicy(m.getUuid());
                            mediation.setGlobal(true);
                            api.setOutSequenceMediation(mediation);
                            found = true;
                            break;
                        }
                    }
                }
            }
            // Fault sequence: same local-then-global resolution.
            if (APIUtil.isSequenceDefined(api.getFaultSequence())) {
                boolean found = false;
                for (MediationInfo mediationInfo : localPolicies) {
                    if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT.equals(mediationInfo.getType())
                            && api.getFaultSequence().equals(mediationInfo.getName())) {
                        org.wso2.carbon.apimgt.persistence.dto.Mediation mediationPolicy = apiPersistenceInstance
                                .getMediationPolicy(org, apiUUID, mediationInfo.getId());
                        Mediation mediation = new Mediation();
                        mediation.setConfig(mediationPolicy.getConfig());
                        mediation.setName(mediationPolicy.getName());
                        mediation.setUuid(mediationPolicy.getId());
                        mediation.setType(APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT);
                        mediation.setGlobal(false);
                        api.setFaultSequenceMediation(mediation);
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    // global policy
                    if (globalPolicies == null) {
                        globalPolicies = getAllGlobalMediationPolicies();
                    }
                    for (Mediation m : globalPolicies) {
                        if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT.equals(m.getType())
                                && api.getFaultSequence().equals(m.getName())) {
                            Mediation mediation = getGlobalMediationPolicy(m.getUuid());
                            mediation.setGlobal(true);
                            api.setFaultSequenceMediation(mediation);
                            found = true;
                            break;
                        }
                    }
                }
            }
        } catch (MediationPolicyPersistenceException e) {
            // NOTE(review): "medation" typo below is a runtime message string; left byte-identical.
            throw new APIManagementException("Error while loading medation policies", e);
        }
    }
}

// Applies a lifecycle state change to the given API: updates status, environments, and persists
// via the persistence layer. Throws FaultGatewaysException when any gateway publication failed.
public boolean updateAPIforStateChange(API api, String currentStatus, String newStatus,
                                       Map<String, String> failedGatewaysMap)
        throws APIManagementException, FaultGatewaysException {
    boolean isSuccess = false;
    Map<String, Map<String, String>> failedGateways = new ConcurrentHashMap<String, Map<String, String>>();
    String provider = api.getId().getProviderName();
    String providerTenantMode = api.getId().getProviderName();
    provider = APIUtil.replaceEmailDomain(provider);
    String name = api.getId().getApiName();
    String version = api.getId().getVersion();
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerTenantMode));
        if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
            isTenantFlowStarted = true;
            PrivilegedCarbonContext.startTenantFlow();
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        }
        //APIIdentifier apiId = new APIIdentifier(provider, name, version);
        //API api = getAPI(apiId);
        if (api != null) {
            //String currentStatus = api.getStatus();
            if (!currentStatus.equals(newStatus)) {
                api.setStatus(newStatus);
                // If API status changed to publish we should add it to recently added APIs list
                // this should happen in store-publisher cluster domain if deployment is distributed
                // IF new API published we will add it to recently added APIs
                Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER)
                        .getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME).removeAll();
                api.setAsPublishedDefaultVersion(api.getId().getVersion()
                        .equals(apiMgtDAO.getPublishedDefaultVersion(api.getId())));
                if (failedGatewaysMap != null) {
                    if (APIConstants.PUBLISHED.equals(newStatus) || APIConstants.DEPRECATED.equals(newStatus)
                            || APIConstants.BLOCKED.equals(newStatus) || APIConstants.PROTOTYPED.equals(newStatus)) {
                        // Drop environments that failed to publish from the API's environment set.
                        Map<String, String> failedToPublishEnvironments = failedGatewaysMap;
                        if (!failedToPublishEnvironments.isEmpty()) {
                            Set<String> publishedEnvironments = new HashSet<String>(api.getEnvironments());
                            publishedEnvironments.removeAll(new ArrayList<String>(failedToPublishEnvironments
                                    .keySet()));
                            api.setEnvironments(publishedEnvironments);
                            //updateApiArtifactNew(api, true, false);
                            failedGateways.clear();
                            failedGateways.put("UNPUBLISHED", Collections.<String, String>emptyMap());
                            failedGateways.put("PUBLISHED", failedToPublishEnvironments);
                        }
                    } else { // API Status : RETIRED or CREATED
                        Map<String, String> failedToRemoveEnvironments = failedGatewaysMap;
                        if(!APIConstants.CREATED.equals(newStatus)) {
                            // Retiring an API clears its subscriptions and any pending workflows.
                            cleanUpPendingSubscriptionCreationProcessesByAPI(api.getUuid());
                            apiMgtDAO.removeAllSubscriptions(api.getUuid());
                        }
                        if (!failedToRemoveEnvironments.isEmpty()) {
                            // Environments that failed to unpublish stay attached to the API.
                            Set<String> publishedEnvironments = new HashSet<String>(api.getEnvironments());
                            publishedEnvironments.addAll(failedToRemoveEnvironments.keySet());
                            api.setEnvironments(publishedEnvironments);
                            //updateApiArtifactNew(api, true, false);
                            failedGateways.clear();
failedGateways.put("UNPUBLISHED", failedToRemoveEnvironments); failedGateways.put("PUBLISHED", Collections.<String, String>emptyMap()); } } } //updateApiArtifactNew(api, false, false); PublisherAPI publisherAPI = APIMapper.INSTANCE.toPublisherApi(api); try { apiPersistenceInstance.updateAPI(new Organization(api.getOrganization()), publisherAPI); } catch (APIPersistenceException e) { handleException("Error while persisting the updated API ", e); } } isSuccess = true; } else { handleException("Couldn't find an API with the name-" + name + "version-" + version); } } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } if (!failedGateways.isEmpty() && (!failedGateways.get("UNPUBLISHED").isEmpty() || !failedGateways.get("PUBLISHED").isEmpty())) { throw new FaultGatewaysException(failedGateways); } return isSuccess; } public boolean updateAPIforStateChange(APIIdentifier identifier, String newStatus, Map<String, String> failedGatewaysMap, API api) throws APIManagementException, FaultGatewaysException { boolean isSuccess = false; Map<String, Map<String, String>> failedGateways = new ConcurrentHashMap<String, Map<String, String>>(); String provider = identifier.getProviderName(); String providerTenantMode = identifier.getProviderName(); provider = APIUtil.replaceEmailDomain(provider); String name = identifier.getApiName(); String version = identifier.getVersion(); boolean isTenantFlowStarted = false; try { String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerTenantMode)); if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { isTenantFlowStarted = true; PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } APIIdentifier apiId = new APIIdentifier(provider, name, version); if (api != null) { String currentStatus = api.getStatus(); if (!currentStatus.equals(newStatus)) { 
api.setStatus(newStatus); // If API status changed to publish we should add it to recently added APIs list // this should happen in store-publisher cluster domain if deployment is distributed // IF new API published we will add it to recently added APIs Caching.getCacheManager(APIConstants.API_MANAGER_CACHE_MANAGER) .getCache(APIConstants.RECENTLY_ADDED_API_CACHE_NAME).removeAll(); api.setAsPublishedDefaultVersion(api.getId().getVersion() .equals(apiMgtDAO.getPublishedDefaultVersion(api.getId()))); if (failedGatewaysMap != null) { if (APIConstants.PUBLISHED.equals(newStatus) || APIConstants.DEPRECATED.equals(newStatus) || APIConstants.BLOCKED.equals(newStatus) || APIConstants.PROTOTYPED.equals(newStatus)) { Map<String, String> failedToPublishEnvironments = failedGatewaysMap; if (!failedToPublishEnvironments.isEmpty()) { Set<String> publishedEnvironments = new HashSet<String>(api.getEnvironments()); publishedEnvironments.removeAll(new ArrayList<String>(failedToPublishEnvironments .keySet())); api.setEnvironments(publishedEnvironments); updateApiArtifact(api, true, false); failedGateways.clear(); failedGateways.put("UNPUBLISHED", Collections.<String, String>emptyMap()); failedGateways.put("PUBLISHED", failedToPublishEnvironments); } } else { // API Status : RETIRED or CREATED Map<String, String> failedToRemoveEnvironments = failedGatewaysMap; if(!APIConstants.CREATED.equals(newStatus)) { cleanUpPendingSubscriptionCreationProcessesByAPI(api.getUuid()); apiMgtDAO.removeAllSubscriptions(api.getUuid()); } if (!failedToRemoveEnvironments.isEmpty()) { Set<String> publishedEnvironments = new HashSet<String>(api.getEnvironments()); publishedEnvironments.addAll(failedToRemoveEnvironments.keySet()); api.setEnvironments(publishedEnvironments); updateApiArtifact(api, true, false); failedGateways.clear(); failedGateways.put("UNPUBLISHED", failedToRemoveEnvironments); failedGateways.put("PUBLISHED", Collections.<String, String>emptyMap()); } } } updateApiArtifact(api, false, 
false); } isSuccess = true; } else { handleException("Couldn't find an API with the name-" + name + "version-" + version); } } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } if (!failedGateways.isEmpty() && (!failedGateways.get("UNPUBLISHED").isEmpty() || !failedGateways.get("PUBLISHED").isEmpty())) { throw new FaultGatewaysException(failedGateways); } return isSuccess; } /** * Function returns true if the specified API already exists in the registry * * @param identifier * @return * @throws APIManagementException */ public boolean checkIfAPIExists(APIIdentifier identifier) throws APIManagementException { String apiPath = APIUtil.getAPIPath(identifier); try { String tenantDomain = MultitenantUtils .getTenantDomain(APIUtil.replaceEmailDomainBack(identifier.getProviderName())); Registry registry; if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { int id = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomain); registry = ServiceReferenceHolder.getInstance().getRegistryService().getGovernanceSystemRegistry(id); } else { if (this.tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(this.tenantDomain)) { registry = ServiceReferenceHolder.getInstance().getRegistryService().getGovernanceUserRegistry( identifier.getProviderName(), MultitenantConstants.SUPER_TENANT_ID); } else { if (this.tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(this.tenantDomain)) { registry = ServiceReferenceHolder.getInstance().getRegistryService().getGovernanceUserRegistry( identifier.getProviderName(), MultitenantConstants.SUPER_TENANT_ID); } else { registry = this.registry; } } } return registry.resourceExists(apiPath); } catch (RegistryException e) { handleException("Failed to get API from : " + apiPath, e); return false; } catch (UserStoreException e) { handleException("Failed to get API from : " + apiPath, e); return false; } } public 
void makeAPIKeysForwardCompatible(API api) throws APIManagementException {
        // Collect all older, non-retired versions of this API so existing subscriptions/keys
        // can be carried forward to the new version via the DAO.
        String provider = api.getId().getProviderName();
        String apiName = api.getId().getApiName();
        Set<String> versions = getAPIVersions(provider, apiName, api.getOrganization());
        APIVersionComparator comparator = new APIVersionComparator();
        List<API> sortedAPIs = new ArrayList<API>();
        for (String version : versions) {
            if (version.equals(api.getId().getVersion())) {
                continue;
            }
            // NOTE(review): only an identifier-bearing stub is built here; status comes from that stub,
            // not from a full registry load (the full getAPI call is commented out below).
            API otherApi = new API(new APIIdentifier(provider, apiName, version));
            if (comparator.compare(otherApi, api) < 0 &&
                    !APIConstants.RETIRED.equals(otherApi.getStatus())) {
                sortedAPIs.add(otherApi);
            }
        }
        // Get the subscriptions from the latest api version first
        Collections.sort(sortedAPIs, comparator);
        apiMgtDAO.makeKeysForwardCompatible(new ApiTypeWrapper(api), sortedAPIs);
    }

    /**
     * Returns the subscriber name for the given subscription id.
     *
     * @param subscriptionId The subscription id of the subscriber to be returned
     * @return The subscriber or null if the requested subscriber does not exist
     * @throws APIManagementException if failed to get Subscriber
     */
    @Override
    public String getSubscriber(String subscriptionId) throws APIManagementException {
        return apiMgtDAO.getSubscriberName(subscriptionId);
    }

    /**
     * Returns the claims of subscriber for the given subscriber.
 *
 * @param subscriber The name of the subscriber to be returned
 * @return The looked up claims of the subscriber or null if the requested subscriber does not exist
 * @throws APIManagementException if failed to get Subscriber
 */
    @Override
    public Map<String, String> getSubscriberClaims(String subscriber) throws APIManagementException {
        String tenantDomain = MultitenantUtils.getTenantDomain(subscriber);
        int tenantId = 0;
        Map<String, String> claimMap = new HashMap<>();
        Map<String, String> subscriberClaims = null;
        String configuredClaims = "";
        try {
            tenantId = getTenantId(tenantDomain);
            UserStoreManager userStoreManager = ServiceReferenceHolder.getInstance().getRealmService().
                    getTenantUserRealm(tenantId).getUserStoreManager();
            if (userStoreManager.isExistingUser(subscriber)) {
                subscriberClaims = APIUtil.getClaims(subscriber, tenantId, ClaimsRetriever.DEFAULT_DIALECT_URI);
                APIManagerConfiguration configuration = getAPIManagerConfiguration();
                configuredClaims = configuration.getFirstProperty(APIConstants.API_PUBLISHER_SUBSCRIBER_CLAIMS);
            }
            // Only the claims listed in the publisher configuration are surfaced; for an unknown
            // user configuredClaims stays "" so the loop puts at most one (empty-key) entry.
            for (String claimURI : configuredClaims.split(",")) {
                if (subscriberClaims != null) {
                    claimMap.put(claimURI, subscriberClaims.get(claimURI));
                }
            }
        } catch (UserStoreException e) {
            throw new APIManagementException("Error while retrieving tenant id for tenant domain "
                    + tenantDomain, e);
        }
        return claimMap;
    }

    // Loads every API referenced by the product's resources from the current tenant.
    private Set<API> getAssociatedAPIs(APIProduct apiProduct) throws APIManagementException {
        List<APIProductResource> productResources = apiProduct.getProductResources();
        Set<API> apis = new HashSet<>();
        for (APIProductResource productResource : productResources) {
            API api = getAPIbyUUID(productResource.getApiId(),
                    CarbonContext.getThreadLocalCarbonContext().getTenantDomain());
            apis.add(api);
        }
        return apis;
    }

    /**
     * This method returns a list of previous versions of a given API
     *
     * @param api the API whose older PUBLISHED versions are wanted
     * @return oldPublishedAPIList identifiers of older versions currently in PUBLISHED state
     * @throws APIManagementException
     */
    private List<APIIdentifier> getOldPublishedAPIList(API api) throws
APIManagementException {
        List<APIIdentifier> oldPublishedAPIList = new ArrayList<APIIdentifier>();
        List<API> apiList = getAPIVersionsByProviderAndName(api.getId().getProviderName(), api.getId().getName(),
                api.getOrganization());
        APIVersionComparator versionComparator = new APIVersionComparator();
        for (API oldAPI : apiList) {
            // Keep only strictly-older versions of the same API that are still PUBLISHED.
            if (oldAPI.getId().getApiName().equals(api.getId().getApiName()) &&
                    versionComparator.compare(oldAPI, api) < 0 &&
                    (oldAPI.getStatus().equals(APIConstants.PUBLISHED))) {
                oldPublishedAPIList.add(oldAPI.getId());
            }
        }
        return oldPublishedAPIList;
    }

    /**
     * This method used to send notifications to the previous subscribers of older versions of a given API
     *
     * @param api the newly published API version
     * @throws APIManagementException
     */
    private void sendEmailNotification(API api) throws APIManagementException {
        try {
            JSONObject tenantConfig = APIUtil.getTenantConfig(tenantDomain);
            String isNotificationEnabled = "false";
            if (tenantConfig.containsKey(NotifierConstants.NOTIFICATIONS_ENABLED)) {
                isNotificationEnabled = (String) tenantConfig.get(NotifierConstants.NOTIFICATIONS_ENABLED);
            }
            if (JavaUtils.isTrueExplicitly(isNotificationEnabled)) {
                // One async notification per older published version, carrying that version's subscribers.
                List<APIIdentifier> apiIdentifiers = getOldPublishedAPIList(api);
                for (APIIdentifier oldAPI : apiIdentifiers) {
                    Properties prop = new Properties();
                    prop.put(NotifierConstants.API_KEY, oldAPI);
                    prop.put(NotifierConstants.NEW_API_KEY, api.getId());
                    Set<Subscriber> subscribersOfAPI = apiMgtDAO.getSubscribersOfAPI(oldAPI);
                    prop.put(NotifierConstants.SUBSCRIBERS_PER_API, subscribersOfAPI);
                    NotificationDTO notificationDTO = new NotificationDTO(prop,
                            NotifierConstants.NOTIFICATION_TYPE_NEW_VERSION);
                    notificationDTO.setTenantID(tenantId);
                    notificationDTO.setTenantDomain(tenantDomain);
                    new NotificationExecutor().sendAsyncNotifications(notificationDTO);
                }
            }
        } catch (NotificationException e) {
            // Notification failure is deliberately non-fatal: log and continue.
            log.error(e.getMessage(), e);
        }
    }

    /**
     * This method used to validate and set transports in api
     * @param api
     * @throws APIManagementException
     */
    private void
validateAndSetTransports(API api) throws APIManagementException {
        String transports = api.getTransports();
        if (!StringUtils.isEmpty(transports) && !("null".equalsIgnoreCase(transports))) {
            if (transports.contains(",")) {
                StringTokenizer st = new StringTokenizer(transports, ",");
                while (st.hasMoreTokens()) {
                    checkIfValidTransport(st.nextToken());
                }
            } else {
                checkIfValidTransport(transports);
            }
        } else {
            // No transports specified (or the literal "null"): default to both HTTP and HTTPS.
            api.setTransports(Constants.TRANSPORT_HTTP + ',' + Constants.TRANSPORT_HTTPS);
        }
    }

    /**
     * This method used to validate and set transports in api product
     * @param apiProduct
     * @throws APIManagementException
     */
    private void validateAndSetTransports(APIProduct apiProduct) throws APIManagementException {
        String transports = apiProduct.getTransports();
        if (!StringUtils.isEmpty(transports) && !("null".equalsIgnoreCase(transports))) {
            if (transports.contains(",")) {
                StringTokenizer st = new StringTokenizer(transports, ",");
                while (st.hasMoreTokens()) {
                    checkIfValidTransport(st.nextToken());
                }
            } else {
                checkIfValidTransport(transports);
            }
        } else {
            apiProduct.setTransports(Constants.TRANSPORT_HTTP + ',' + Constants.TRANSPORT_HTTPS);
        }
    }

    /**
     * This method used to select security level according to given api Security
     * @param apiSecurity comma-separated list of requested security schemes
     * @return the accepted scheme list, with mandatory-scheme entries appended per the rules below
     */
    private ArrayList<String> selectSecurityLevels(String apiSecurity) {
        ArrayList<String> securityLevels = new ArrayList<>();
        String[] apiSecurityLevels = apiSecurity.split(",");
        boolean isOauth2 = false;
        boolean isMutualSSL = false;
        boolean isBasicAuth = false;
        boolean isApiKey = false;
        boolean isMutualSSLMandatory = false;
        boolean isOauthBasicAuthMandatory = false;
        boolean securitySchemeFound = false;
        for (String apiSecurityLevel : apiSecurityLevels) {
            if (apiSecurityLevel.trim().equalsIgnoreCase(APIConstants.DEFAULT_API_SECURITY_OAUTH2)) {
                isOauth2 = true;
                securityLevels.add(APIConstants.DEFAULT_API_SECURITY_OAUTH2);
                securitySchemeFound = true;
            }
            if (apiSecurityLevel.trim().equalsIgnoreCase(APIConstants.API_SECURITY_MUTUAL_SSL)) {
                isMutualSSL = true;
                securityLevels.add(APIConstants.API_SECURITY_MUTUAL_SSL);
                securitySchemeFound = true;
            }
            if (apiSecurityLevel.trim().equalsIgnoreCase(APIConstants.API_SECURITY_BASIC_AUTH)) {
                isBasicAuth = true;
                securityLevels.add(APIConstants.API_SECURITY_BASIC_AUTH);
                securitySchemeFound = true;
            }
            if (apiSecurityLevel.trim().equalsIgnoreCase(APIConstants.API_SECURITY_API_KEY)){
                isApiKey = true;
                securityLevels.add(APIConstants.API_SECURITY_API_KEY);
                securitySchemeFound = true;
            }
            if (apiSecurityLevel.trim().equalsIgnoreCase(APIConstants.API_SECURITY_MUTUAL_SSL_MANDATORY)) {
                isMutualSSLMandatory = true;
                securityLevels.add(APIConstants.API_SECURITY_MUTUAL_SSL_MANDATORY);
            }
            if (apiSecurityLevel.trim().equalsIgnoreCase(APIConstants.API_SECURITY_OAUTH_BASIC_AUTH_API_KEY_MANDATORY)) {
                isOauthBasicAuthMandatory = true;
                securityLevels.add(APIConstants.API_SECURITY_OAUTH_BASIC_AUTH_API_KEY_MANDATORY);
            }
        }
        // If no security schema found, set OAuth2 as default
        if (!securitySchemeFound) {
            isOauth2 = true;
            securityLevels.add(APIConstants.DEFAULT_API_SECURITY_OAUTH2);
        }
        // If Only OAuth2/Basic-Auth specified, set it as mandatory
        if (!isMutualSSL && !isOauthBasicAuthMandatory) {
            securityLevels.add(APIConstants.API_SECURITY_OAUTH_BASIC_AUTH_API_KEY_MANDATORY);
        }
        // If Only Mutual SSL specified, set it as mandatory
        if (!isBasicAuth && !isOauth2 && !isApiKey && !isMutualSSLMandatory) {
            securityLevels.add(APIConstants.API_SECURITY_MUTUAL_SSL_MANDATORY);
        }
        // If OAuth2/Basic-Auth and Mutual SSL protected and not specified the mandatory scheme,
        // set OAuth2/Basic-Auth as mandatory
        if ((isOauth2 || isBasicAuth || isApiKey) && isMutualSSL && !isOauthBasicAuthMandatory
                && !isMutualSSLMandatory) {
            securityLevels.add(APIConstants.API_SECURITY_OAUTH_BASIC_AUTH_API_KEY_MANDATORY);
        }
        return securityLevels;
    }

    /**
     * To validate the API Security options and set it.
     *
     * @param api Relevant API that need to be validated.
 */
    private void validateAndSetAPISecurity(API api) {
        // Default to OAuth2 when the API declares no security string at all.
        String apiSecurity = APIConstants.DEFAULT_API_SECURITY_OAUTH2;
        String security = api.getApiSecurity();
        if (security!= null) {
            apiSecurity = security;
            ArrayList<String> securityLevels = selectSecurityLevels(apiSecurity);
            apiSecurity = String.join(",", securityLevels);
        }
        if (log.isDebugEnabled()) {
            log.debug("API " + api.getId() + " has following enabled protocols : " + apiSecurity);
        }
        api.setApiSecurity(apiSecurity);
    }

    /**
     * To validate the API Security options and set it.
     *
     * @param apiProduct Relevant APIProduct that need to be validated.
     */
    private void validateAndSetAPISecurity(APIProduct apiProduct) {
        String apiSecurity = APIConstants.DEFAULT_API_SECURITY_OAUTH2;
        String security = apiProduct.getApiSecurity();
        if (security!= null) {
            apiSecurity = security;
            ArrayList<String> securityLevels = selectSecurityLevels(apiSecurity);
            apiSecurity = String.join(",", securityLevels);
        }
        if (log.isDebugEnabled()) {
            log.debug("APIProduct " + apiProduct.getId() + " has following enabled protocols : " + apiSecurity);
        }
        apiProduct.setApiSecurity(apiSecurity);
    }

    // Rejects any transport other than http/https/ws/wss (case-insensitive).
    private void checkIfValidTransport(String transport) throws APIManagementException {
        if (!Constants.TRANSPORT_HTTP.equalsIgnoreCase(transport)
                && !Constants.TRANSPORT_HTTPS.equalsIgnoreCase(transport)
                && !APIConstants.WS_PROTOCOL.equalsIgnoreCase(transport)
                && !APIConstants.WSS_PROTOCOL.equalsIgnoreCase(transport)) {
            handleException("Unsupported Transport [" + transport + ']');
        }
    }

    // Undeploys the API from every environment in gatewaysToRemove that is NOT also in environmentsToAdd.
    private void removeFromGateway(API api, Set<APIRevisionDeployment> gatewaysToRemove,
                                   Set<String> environmentsToAdd) {
        // NOTE(review): environmentsToAddSet is never read afterwards — looks like leftover code.
        Set<String> environmentsToAddSet = new HashSet<>(environmentsToAdd);
        Set<String> environmentsToRemove = new HashSet<>();
        for (APIRevisionDeployment apiRevisionDeployment : gatewaysToRemove) {
            environmentsToRemove.add(apiRevisionDeployment.getDeployment());
        }
        environmentsToRemove.removeAll(environmentsToAdd);
        APIGatewayManager gatewayManager = APIGatewayManager.getInstance();
gatewayManager.unDeployFromGateway(api, tenantDomain, environmentsToRemove);
        if (log.isDebugEnabled()) {
            String logMessage = "API Name: " + api.getId().getApiName() + ", API Version "
                    + api.getId().getVersion() + " deleted from gateway";
            log.debug(logMessage);
        }
    }

    /**
     * Creates a new version of an existing API by cloning it (docs, icon, mediation policies,
     * WSDL and GraphQL schema included) and then restoring/updating the original API.
     *
     * @param existingApiId    UUID of the API to version
     * @param newVersion       version string for the clone; must differ from the existing version
     * @param isDefaultVersion whether the new version becomes the default
     * @param organization     owning organization
     * @return the newly created API version, re-read from persistence
     * @throws APIManagementException on lookup/persistence failure, missing API, or duplicate version
     */
    public API createNewAPIVersion(String existingApiId, String newVersion, Boolean isDefaultVersion,
                                   String organization) throws APIManagementException {
        API existingAPI = getAPIbyUUID(existingApiId, organization);
        if (existingAPI == null) {
            throw new APIMgtResourceNotFoundException("API not found for id " + existingApiId,
                    ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, existingApiId));
        }
        if (newVersion.equals(existingAPI.getId().getVersion())) {
            throw new APIMgtResourceAlreadyExistsException(
                    "Version " + newVersion + " exists for api " + existingAPI.getId().getApiName());
        }
        existingAPI.setOrganization(organization);
        // Remember the original identity/state so the same object can be reverted after the clone.
        APIIdentifier existingAPIId = existingAPI.getId();
        String existingAPICreatedTime = existingAPI.getCreatedTime();
        String existingAPIStatus = existingAPI.getStatus();
        boolean isExsitingAPIdefaultVersion = existingAPI.isDefaultVersion();
        String existingContext = existingAPI.getContext();
        APIIdentifier newApiId = new APIIdentifier(existingAPI.getId().getProviderName(),
                existingAPI.getId().getApiName(), newVersion);
        existingAPI.setUuid(null);
        existingAPI.setId(newApiId);
        existingAPI.setStatus(APIConstants.CREATED);
        existingAPI.setDefaultVersion(isDefaultVersion);
        // We need to change the context by setting the new version
        // This is a change that is coming with the context version strategy
        String existingAPIContextTemplate = existingAPI.getContextTemplate();
        existingAPI.setContext(existingAPIContextTemplate.replace("{version}", newVersion));
        API newAPI = addAPI(existingAPI);
        String newAPIId = newAPI.getUuid();
        // copy docs
        List<Documentation> existingDocs = getAllDocumentation(existingApiId, organization);
        if (existingDocs != null) {
            for (Documentation documentation : existingDocs) {
                Documentation newDoc = addDocumentation(newAPIId, documentation, organization);
                DocumentationContent content = getDocumentationContent(existingApiId, documentation.getId(),
                        organization);
                // TODO see whether we can optimize this
                if (content != null) {
                    addDocumentationContent(newAPIId, newDoc.getId(), organization, content);
                }
            }
        }
        // copy icon
        ResourceFile icon = getIcon(existingApiId, organization);
        if (icon != null) {
            setThumbnailToAPI(newAPIId, icon, organization);
        }
        // copy sequences
        List<Mediation> mediationPolicies = getAllApiSpecificMediationPolicies(existingApiId, organization);
        if (mediationPolicies != null) {
            for (Mediation mediation : mediationPolicies) {
                Mediation policy = getApiSpecificMediationPolicyByPolicyId(existingApiId, mediation.getUuid(),
                        organization);
                addApiSpecificMediationPolicy(newAPIId, policy, organization);
            }
        }
        // copy wsdl
        if (existingAPI.getWsdlUrl() != null) {
            ResourceFile wsdl = getWSDL(existingApiId, organization);
            if (wsdl != null) {
                addWSDLResource(newAPIId, wsdl, null, organization);
            }
        }
        // copy graphql definition
        String graphQLSchema = getGraphqlSchemaDefinition(existingApiId, organization);
        if(graphQLSchema != null) {
            saveGraphqlSchemaDefinition(newAPIId, graphQLSchema, organization);
        }
        // update old api
        // revert back to old values before update.
        existingAPI.setUuid(existingApiId);
        existingAPI.setStatus(existingAPIStatus);
        existingAPI.setId(existingAPIId);
        existingAPI.setContext(existingContext);
        existingAPI.setCreatedTime(existingAPICreatedTime);
        // update existing api with setLatest to false
        existingAPI.setLatest(false);
        if (isDefaultVersion) {
            existingAPI.setDefaultVersion(false);
        } else {
            existingAPI.setDefaultVersion(isExsitingAPIdefaultVersion);
        }
        try {
            apiPersistenceInstance.updateAPI(new Organization(organization),
                    APIMapper.INSTANCE.toPublisherApi(existingAPI));
        } catch (APIPersistenceException e) {
            throw new APIManagementException("Error while updating API details", e);
        }
        return getAPIbyUUID(newAPIId, organization);
    }

    // Thin DAO delegation: service key lookup by API id and tenant.
    public String retrieveServiceKeyByApiId(int apiId, int tenantId) throws APIManagementException {
        return apiMgtDAO.retrieveServiceKeyByApiId(apiId, tenantId);
    }

    // Copies every sequence file under the given flow path of an API to the same path of newVersion.
    private void copySequencesToNewVersion(API api, String newVersion, String pathFlow) throws Exception {
        String seqFilePath = APIUtil.getSequencePath(api.getId(), pathFlow);
        if (registry.resourceExists(seqFilePath)) {
            APIIdentifier newApiId = new APIIdentifier(api.getId().getProviderName(),
                    api.getId().getApiName(), newVersion);
            String seqNewFilePath = APIUtil.getSequencePath(newApiId, pathFlow);
            org.wso2.carbon.registry.api.Collection seqCollection =
                    (org.wso2.carbon.registry.api.Collection) registry.get(seqFilePath);
            if (seqCollection != null) {
                String[] seqChildPaths = seqCollection.getChildren();
                for (String seqChildPath : seqChildPaths) {
                    Resource sequence = registry.get(seqChildPath);
                    ResourceFile seqFile = new ResourceFile(sequence.getContentStream(), sequence.getMediaType());
                    // The sequence's "name" attribute becomes the file name in the new version's path.
                    OMElement seqElement = APIUtil.buildOMElement(sequence.getContentStream());
                    String seqFileName = seqElement.getAttributeValue(new QName("name"));
                    addResourceFile(api.getId(), seqNewFilePath + seqFileName, seqFile);
                }
            }
        }
    }

    /**
     * Removes a given documentation
     *
     * @param apiId APIIdentifier
     * @param docType the type of the documentation
     * @param
docName name of the document
     * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to remove documentation
     */
    public void removeDocumentation(APIIdentifier apiId, String docName, String docType, String orgId)
            throws APIManagementException {
        String docPath = APIUtil.getAPIDocPath(apiId) + docName;
        try {
            String apiArtifactId = registry.get(docPath).getUUID();
            GenericArtifactManager artifactManager = APIUtil
                    .getArtifactManager(registry, APIConstants.DOCUMENTATION_KEY);
            if (artifactManager == null) {
                String errorMessage = "Failed to retrieve artifact manager when deleting documentation of API "
                        + apiId + " document type " + docType + " document name " + docName;
                log.error(errorMessage);
                throw new APIManagementException(errorMessage);
            }
            GenericArtifact artifact = artifactManager.getGenericArtifact(apiArtifactId);
            String docFilePath = artifact.getAttribute(APIConstants.DOC_FILE_PATH);
            if (docFilePath != null) {
                // Only the file name portion of the stored path is used to locate the registry resource.
                File tempFile = new File(docFilePath);
                String fileName = tempFile.getName();
                docFilePath = APIUtil.getDocumentationFilePath(apiId, fileName);
                if (registry.resourceExists(docFilePath)) {
                    registry.delete(docFilePath);
                }
            }
        } catch (RegistryException e) {
            handleException("Failed to delete documentation", e);
        }
    }

    /**
     * @param id    Identifier of the API/product owning the document
     * @param docId UUID of the doc
     * @throws APIManagementException if failed to remove documentation
     */
    public void removeDocumentation(Identifier id, String docId, String orgId) throws APIManagementException {
        String uuid;
        // FIX: the null check was inverted — the original assigned the (null) UUID when it was
        // null and did a DAO lookup when a UUID was already present. Use the UUID when available,
        // fall back to the DAO lookup otherwise.
        if (id.getUUID() != null) {
            uuid = id.getUUID();
        } else {
            uuid = apiMgtDAO.getUUIDFromIdentifier(id.getProviderName(), id.getName(), id.getVersion(), orgId);
        }
        removeDocumentation(uuid, docId, orgId);
    }

    @Override
    public void removeDocumentation(String apiId, String docId, String organization) throws APIManagementException {
        try {
            apiPersistenceInstance.deleteDocumentation(new Organization(organization), apiId, docId);
        } catch (DocumentationPersistenceException e) {
            throw new
APIManagementException("Error while deleting the document " + docId); } } /** * This method used to save the documentation content * * @param api, API * @param documentationName, name of the inline documentation * @param text, content of the inline documentation * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to add the document as a resource to registry */ public void addDocumentationContent(API api, String documentationName, String text) throws APIManagementException { APIIdentifier identifier = api.getId(); String documentationPath = APIUtil.getAPIDocPath(identifier) + documentationName; String contentPath = APIUtil.getAPIDocPath(identifier) + APIConstants.INLINE_DOCUMENT_CONTENT_DIR + RegistryConstants.PATH_SEPARATOR + documentationName; boolean isTenantFlowStarted = false; try { if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } Resource docResource = registry.get(documentationPath); GenericArtifactManager artifactManager = new GenericArtifactManager(registry, APIConstants.DOCUMENTATION_KEY); GenericArtifact docArtifact = artifactManager.getGenericArtifact(docResource.getUUID()); Documentation doc = APIUtil.getDocumentation(docArtifact); Resource docContent; if (!registry.resourceExists(contentPath)) { docContent = registry.newResource(); } else { docContent = registry.get(contentPath); } /* This is a temporary fix for doc content replace issue. 
We need to add * separate methods to add inline content resource in document update */ if (!APIConstants.NO_CONTENT_UPDATE.equals(text)) { docContent.setContent(text); } docContent.setMediaType(APIConstants.DOCUMENTATION_INLINE_CONTENT_TYPE); registry.put(contentPath, docContent); String apiPath = APIUtil.getAPIPath(identifier); String[] authorizedRoles = getAuthorizedRoles(apiPath); String docVisibility = doc.getVisibility().name(); String visibility = api.getVisibility(); if (docVisibility != null) { if (APIConstants.DOC_SHARED_VISIBILITY.equalsIgnoreCase(docVisibility)) { authorizedRoles = null; visibility = APIConstants.DOC_SHARED_VISIBILITY; } else if (APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(docVisibility)) { authorizedRoles = null; visibility = APIConstants.DOC_OWNER_VISIBILITY; } } APIUtil.setResourcePermissions(api.getId().getProviderName(),visibility, authorizedRoles,contentPath, registry); } catch (RegistryException e) { String msg = "Failed to add the documentation content of : " + documentationName + " of API :" + identifier.getApiName(); handleException(msg, e); } catch (UserStoreException e) { String msg = "Failed to add the documentation content of : " + documentationName + " of API :" + identifier.getApiName(); handleException(msg, e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } } /** * Updates a visibility of the documentation * * @param api API * @param documentation Documentation * @throws APIManagementException if failed to update visibility */ private void updateDocVisibility(API api, Documentation documentation) throws APIManagementException { try { GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry,APIConstants.DOCUMENTATION_KEY); if (artifactManager == null) { String errorMessage = "Artifact manager is null when updating documentation of API " + api.getId().getApiName(); throw new APIManagementException(errorMessage); } GenericArtifact artifact = 
artifactManager.getGenericArtifact(documentation.getId());
            String[] authorizedRoles = new String[0];
            String visibleRolesList = api.getVisibleRoles();
            if (visibleRolesList != null) {
                authorizedRoles = visibleRolesList.split(",");
            }
            int tenantId;
            String tenantDomain =
                    MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(api.getId().getProviderName()));
            try {
                tenantId = getTenantId(tenantDomain);
                GenericArtifact updateApiArtifact =
                        APIUtil.createDocArtifactContent(artifact, api.getId(), documentation);
                artifactManager.updateGenericArtifact(updateApiArtifact);
                // Reset then re-apply permissions on the doc artifact itself...
                APIUtil.clearResourcePermissions(artifact.getPath(), api.getId(), tenantId);
                APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), authorizedRoles,
                        artifact.getPath(), registry);
                // ...and on the doc's content resource, whose path depends on the source type.
                String docType = artifact.getAttribute(APIConstants.DOC_SOURCE_TYPE);
                if (APIConstants.IMPLEMENTATION_TYPE_INLINE.equals(docType)
                        || APIConstants.IMPLEMENTATION_TYPE_MARKDOWN.equals(docType)) {
                    String docContentPath = APIUtil.getAPIDocPath(api.getId()) + APIConstants
                            .INLINE_DOCUMENT_CONTENT_DIR + RegistryConstants.PATH_SEPARATOR
                            + artifact.getAttribute(APIConstants.DOC_NAME);
                    APIUtil.clearResourcePermissions(docContentPath, api.getId(), tenantId);
                    APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                            authorizedRoles, docContentPath, registry);
                } else if (APIConstants.IMPLEMENTATION_TYPE_FILE.equals(docType)) {
                    String docFilePath = APIUtil.getDocumentationFilePath(api.getId(),
                            artifact.getAttribute(APIConstants.DOC_FILE_PATH).split(
                                    APIConstants.DOCUMENT_FILE_DIR + RegistryConstants.PATH_SEPARATOR)[1]);
                    APIUtil.clearResourcePermissions(docFilePath, api.getId(), tenantId);
                    APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(),
                            authorizedRoles, docFilePath, registry);
                }
            } catch (UserStoreException e) {
                throw new APIManagementException("Error in retrieving Tenant Information while updating the "
                        + "visibility of documentations for the API :" + api.getId().getApiName(), e);
            }
        } catch (RegistryException e) {
            handleException("Failed to update visibility of documentation" + api.getId().getApiName(), e);
        }
    }

    /**
     * Updates a given documentation
     *
     * @param apiId         id of the document
     * @param documentation Documentation
     * @param organization  identifier of the organization
     * @return updated documentation Documentation
     * @throws APIManagementException if failed to update docs
     */
    public Documentation updateDocumentation(String apiId, Documentation documentation, String organization)
            throws APIManagementException {
        if (documentation != null) {
            org.wso2.carbon.apimgt.persistence.dto.Documentation mappedDoc = DocumentMapper.INSTANCE
                    .toDocumentation(documentation);
            try {
                org.wso2.carbon.apimgt.persistence.dto.Documentation updatedDoc = apiPersistenceInstance
                        .updateDocumentation(new Organization(organization), apiId, mappedDoc);
                if (updatedDoc != null) {
                    return DocumentMapper.INSTANCE.toDocumentation(updatedDoc);
                }
            } catch (DocumentationPersistenceException e) {
                // NOTE(review): message says "add" in an update path — likely copy-pasted; confirm intent.
                handleException("Failed to add documentation", e);
            }
        }
        return null;
    }

    /**
     * Copies current Documentation into another version of the same API.
     *
     * @param toVersion Version to which Documentation should be copied.
 * @param apiId     id of the APIIdentifier
     * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to copy docs
     */
    public void copyAllDocumentation(APIIdentifier apiId, String toVersion) throws APIManagementException {
        String oldVersion = APIUtil.getAPIDocPath(apiId);
        String newVersion = APIConstants.API_ROOT_LOCATION + RegistryConstants.PATH_SEPARATOR
                + apiId.getProviderName() + RegistryConstants.PATH_SEPARATOR + apiId.getApiName()
                + RegistryConstants.PATH_SEPARATOR + toVersion + RegistryConstants.PATH_SEPARATOR
                + APIConstants.DOC_DIR;
        try {
            Resource resource = registry.get(oldVersion);
            if (resource instanceof org.wso2.carbon.registry.core.Collection) {
                String[] docsPaths = ((org.wso2.carbon.registry.core.Collection) resource).getChildren();
                for (String docPath : docsPaths) {
                    registry.copy(docPath, newVersion);
                }
            }
        } catch (RegistryException e) {
            handleException("Failed to copy docs to new version : " + newVersion, e);
        }
    }

    /**
     * Create an Api
     *
     * @param api API
     * @return UUID of the created registry artifact
     * @throws APIManagementException if failed to create API
     */
    protected String createAPI(API api) throws APIManagementException {
        GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
        if (artifactManager == null) {
            String errorMessage = "Failed to retrieve artifact manager when creating API "
                    + api.getId().getApiName();
            log.error(errorMessage);
            throw new APIManagementException(errorMessage);
        }
        if (api.isEndpointSecured() && StringUtils.isEmpty(api.getEndpointUTPassword())) {
            String errorMessage = "Empty password is given for endpointSecurity when creating API "
                    + api.getId().getApiName();
            throw new APIManagementException(errorMessage);
        }
        //Validate Transports
        validateAndSetTransports(api);
        validateAndSetAPISecurity(api);
        boolean transactionCommitted = false;
        String apiUUID = null;
        try {
            // Everything below runs inside a single registry transaction; the finally block rolls
            // back anything that did not commit.
            registry.beginTransaction();
            GenericArtifact genericArtifact =
                    artifactManager.newGovernanceArtifact(new QName(api.getId().getApiName()));
            if (genericArtifact == null) {
                String errorMessage = "Generic artifact is null when creating API " + api.getId().getApiName();
                log.error(errorMessage);
                throw new APIManagementException(errorMessage);
            }
            GenericArtifact artifact = APIUtil.createAPIArtifactContent(genericArtifact, api);
            artifactManager.addGenericArtifact(artifact);
            //Attach the API lifecycle
            artifact.attachLifecycle(APIConstants.API_LIFE_CYCLE);
            String artifactPath = GovernanceUtils.getArtifactPath(registry, artifact.getId());
            String providerPath = APIUtil.getAPIProviderPath(api.getId());
            //provider ------provides----> API
            registry.addAssociation(providerPath, artifactPath, APIConstants.PROVIDER_ASSOCIATION);
            Set<String> tagSet = api.getTags();
            if (tagSet != null) {
                for (String tag : tagSet) {
                    registry.applyTag(artifactPath, tag);
                }
            }
            if (APIUtil.isValidWSDLURL(api.getWsdlUrl(), false)) {
                String path = APIUtil.createWSDL(registry, api);
                updateWSDLUriInAPIArtifact(path, artifactManager, artifact, artifactPath);
            }
            if (api.getWsdlResource() != null) {
                String path = APIUtil.saveWSDLResource(registry, api);
                updateWSDLUriInAPIArtifact(path, artifactManager, artifact, artifactPath);
            }
            //write API Status to a separate property. This is done to support querying APIs using custom query (SQL)
            //to gain performance
            String apiStatus = api.getStatus();
            saveAPIStatus(artifactPath, apiStatus);
            String visibleRolesList = api.getVisibleRoles();
            String[] visibleRoles = new String[0];
            if (visibleRolesList != null) {
                visibleRoles = visibleRolesList.split(",");
            }
            String publisherAccessControlRoles = api.getAccessControlRoles();
            updateRegistryResources(artifactPath, publisherAccessControlRoles, api.getAccessControl(),
                    api.getAdditionalProperties());
            APIUtil.setResourcePermissions(api.getId().getProviderName(), api.getVisibility(), visibleRoles,
                    artifactPath, registry);
            registry.commitTransaction();
            transactionCommitted = true;
            if (log.isDebugEnabled()) {
                String logMessage = "API Name: " + api.getId().getApiName() + ", API Version "
                        + api.getId().getVersion() + " created";
                log.debug(logMessage);
            }
            apiUUID = artifact.getId();
        } catch (RegistryException e) {
            try {
                registry.rollbackTransaction();
            } catch (RegistryException re) {
                // Throwing an error here would mask the original exception
                log.error("Error while rolling back the transaction for API: " + api.getId().getApiName(), re);
            }
            handleException("Error while performing registry transaction operation", e);
        } catch (APIManagementException e) {
            handleException("Error while creating API", e);
        } finally {
            try {
                if (!transactionCommitted) {
                    registry.rollbackTransaction();
                }
            } catch (RegistryException ex) {
                handleException("Error while rolling back the transaction for API: "
                        + api.getId().getApiName(), ex);
            }
        }
        return apiUUID;
    }

    /**
     * Update WSDLUri in the API Registry artifact
     *
     * @param wsdlPath        WSDL Registry Path
     * @param artifactManager Artifact Manager
     * @param artifact        API Artifact
     * @param artifactPath    API Artifact path
     * @throws RegistryException when error occurred while updating WSDL path
     */
    private void updateWSDLUriInAPIArtifact(String wsdlPath, GenericArtifactManager artifactManager,
                                            GenericArtifact artifact, String artifactPath) throws
RegistryException {
        if (wsdlPath != null) {
            artifact.setAttribute(APIConstants.API_OVERVIEW_WSDL, wsdlPath);
            artifactManager.updateGenericArtifact(artifact); //update the artifact
        }
    }

    /**
     * Create a documentation
     *
     * @param api           API
     * @param documentation Documentation
     * @throws APIManagementException if failed to add documentation
     */
    private void createDocumentation(API api, Documentation documentation) throws APIManagementException {
        try {
            APIIdentifier apiId = api.getId();
            GenericArtifactManager artifactManager = new GenericArtifactManager(registry,
                    APIConstants.DOCUMENTATION_KEY);
            GenericArtifact artifact = artifactManager.newGovernanceArtifact(new QName(documentation.getName()));
            artifactManager.addGenericArtifact(APIUtil.createDocArtifactContent(artifact, apiId, documentation));
            String apiPath = APIUtil.getAPIPath(apiId);
            // Shared/owner-visible docs drop the role restriction and override the API's visibility.
            String docVisibility = documentation.getVisibility().name();
            String[] authorizedRoles = getAuthorizedRoles(apiPath);
            String visibility = api.getVisibility();
            if (docVisibility != null) {
                if (APIConstants.DOC_SHARED_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_SHARED_VISIBILITY;
                } else if (APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_OWNER_VISIBILITY;
                }
            }
            APIUtil.setResourcePermissions(api.getId().getProviderName(), visibility, authorizedRoles,
                    artifact.getPath(), registry);
            String docFilePath = artifact.getAttribute(APIConstants.DOC_FILE_PATH);
            if (docFilePath != null && !"".equals(docFilePath)) {
                //The docFilePatch comes as /t/tenanatdoman/registry/resource/_system/governance/apimgt/applicationdata..
                //We need to remove the /t/tenanatdoman/registry/resource/_system/governance section to set permissions.
                int startIndex = docFilePath.indexOf(APIConstants.GOVERNANCE) + (APIConstants.GOVERNANCE).length();
                String filePath = docFilePath.substring(startIndex, docFilePath.length());
                APIUtil.setResourcePermissions(api.getId().getProviderName(), visibility, authorizedRoles,
                        filePath, registry);
            }
            documentation.setId(artifact.getId());
        } catch (RegistryException e) {
            handleException("Failed to add documentation", e);
        } catch (UserStoreException e) {
            handleException("Failed to add documentation", e);
        }
    }

    @Override
    public Documentation addDocumentation(String uuid, Documentation documentation, String organization)
            throws APIManagementException {
        // Maps the API-model Documentation to the persistence DTO, stores it, and maps the result back.
        if (documentation != null) {
            org.wso2.carbon.apimgt.persistence.dto.Documentation mappedDoc = DocumentMapper.INSTANCE
                    .toDocumentation(documentation);
            try {
                org.wso2.carbon.apimgt.persistence.dto.Documentation addedDoc =
                        apiPersistenceInstance.addDocumentation(
                                new Organization(organization), uuid, mappedDoc);
                if (addedDoc != null) {
                    return DocumentMapper.INSTANCE.toDocumentation(addedDoc);
                }
            } catch (DocumentationPersistenceException e) {
                handleException("Failed to add documentation", e);
            }
        }
        return null;
    }

    @Override
    public boolean isDocumentationExist(String uuid, String docName, String organization)
            throws APIManagementException {
        // Exact-name match against the persistence layer's documentation search for this API.
        boolean exist = false;
        UserContext ctx = null;
        try {
            DocumentSearchResult result = apiPersistenceInstance.searchDocumentation(new Organization(organization),
                    uuid, 0, 0, "name:" + docName, ctx);
            if (result != null && result.getDocumentationList() != null
                    && !result.getDocumentationList().isEmpty()) {
                String returnDocName = result.getDocumentationList().get(0).getName();
                if (returnDocName != null && returnDocName.equals(docName)) {
                    exist = true;
                }
            }
        } catch (DocumentationPersistenceException e) {
            handleException("Failed to search documentation for name " + docName, e);
        }
        return exist;
    }

    // Resolves the roles allowed to GET the given governance artifact path, tenant-aware.
    private String[] getAuthorizedRoles(String artifactPath) throws UserStoreException {
        String resourcePath =
RegistryUtils.getAbsolutePath(RegistryContext.getBaseInstance(), APIUtil.getMountedPath(RegistryContext.getBaseInstance(), RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH) + artifactPath); if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { int tenantId = ServiceReferenceHolder.getInstance().getRealmService(). getTenantManager().getTenantId(tenantDomain); AuthorizationManager authManager = ServiceReferenceHolder.getInstance().getRealmService(). getTenantUserRealm(tenantId).getAuthorizationManager(); return authManager.getAllowedRolesForResource(resourcePath, ActionConstants.GET); } else { RegistryAuthorizationManager authorizationManager = new RegistryAuthorizationManager (ServiceReferenceHolder.getUserRealm()); return authorizationManager.getAllowedRolesForResource(resourcePath, ActionConstants.GET); } } /** * Returns the details of all the life-cycle changes done per api * * @param apiId API Identifier * @param organization Organization * @return List of lifecycle events per given api * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to get Lifecycle Events */ public List<LifeCycleEvent> getLifeCycleEvents(APIIdentifier apiId, String organization) throws APIManagementException { return apiMgtDAO.getLifeCycleEvents(apiId, organization); } /** * Update the subscription status * * @param apiId API Identifier * @param subStatus Subscription Status * @param appId Application Id * @param organization Organization * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status */ public void updateSubscription(APIIdentifier apiId, String subStatus, int appId, String organization) throws APIManagementException { apiMgtDAO.updateSubscription(apiId, subStatus, appId, organization); } /** * This method is used to update the subscription * * @param subscribedAPI subscribedAPI object that represents the new subscription detals * @throws APIManagementException if failed to update subscription */ 
public void updateSubscription(SubscribedAPI subscribedAPI) throws APIManagementException { apiMgtDAO.updateSubscription(subscribedAPI); subscribedAPI = apiMgtDAO.getSubscriptionByUUID(subscribedAPI.getUUID()); Identifier identifier = subscribedAPI.getApiId() != null ? subscribedAPI.getApiId() : subscribedAPI.getProductId(); String tenantDomain = MultitenantUtils .getTenantDomain(APIUtil.replaceEmailDomainBack(identifier.getProviderName())); SubscriptionEvent subscriptionEvent = new SubscriptionEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.SUBSCRIPTIONS_UPDATE.name(), tenantId, tenantDomain, subscribedAPI.getSubscriptionId(), subscribedAPI.getUUID(), identifier.getId(), identifier.getUUID(), subscribedAPI.getApplication().getId(), subscribedAPI.getApplication().getUUID(), subscribedAPI.getTier().getName(), subscribedAPI.getSubStatus()); APIUtil.sendNotification(subscriptionEvent, APIConstants.NotifierType.SUBSCRIPTIONS.name()); } public void deleteAPI(String apiUuid, String organization) throws APIManagementException { boolean isError = false; int apiId = -1; API api = null; // get api object by uuid try { api = getAPIbyUUID(apiUuid, organization); } catch (APIManagementException e) { log.error("Error while getting API by uuid for deleting API " + apiUuid + " on organization " + organization); log.debug("Following steps will be skipped while deleting API " + apiUuid + "on organization " + organization + " due to api being null. " + "deleting Resource Registration from key managers, deleting on external API stores, " + "event publishing to gateways, logging audit message, extracting API details for " + "the recommendation system. 
" ); isError = true; } // get api id from db try { apiId = apiMgtDAO.getAPIID(apiUuid); } catch (APIManagementException e) { log.error("Error while getting API ID from DB for deleting API " + apiUuid + " on organization " + organization, e); log.debug("Following steps will be skipped while deleting the API " + apiUuid + " on organization " + organization + "due to api id being null. cleanup workflow tasks of the API, " + "delete event publishing to gateways"); isError = true; } // DB delete operations if (!isError && api != null) { try { deleteAPIRevisions(apiUuid, organization); deleteAPIFromDB(api); if (log.isDebugEnabled()) { String logMessage = "API Name: " + api.getId().getApiName() + ", API Version " + api.getId().getVersion() + " successfully removed from the database."; log.debug(logMessage); } } catch (APIManagementException e) { log.error("Error while executing API delete operations on DB for API " + apiUuid + " on organization " + organization, e); isError = true; } } // Deleting Resource Registration from key managers if (api != null && api.getId() != null && api.getId().toString() != null) { Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain); for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) { KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager(); if (keyManager != null) { try { keyManager.deleteRegisteredResourceByAPIId(api.getId().toString()); log.debug("API " + apiUuid + " on organization " + organization + " has successfully removed from the Key Manager " + keyManagerDtoEntry.getKey()); } catch (APIManagementException e) { log.error("Error while deleting Resource Registration for API " + apiUuid + " on organization " + organization + " in Key Manager " + keyManagerDtoEntry.getKey(), e); } } } } try { GatewayArtifactsMgtDAO.getInstance().deleteGatewayArtifacts(apiUuid); log.debug("API " + apiUuid + " on organization " + organization + " has 
successfully removed from the gateway artifacts."); } catch (APIManagementException e) { log.error("Error while executing API delete operation on gateway artifacts for API " + apiUuid, e); isError = true; } try { apiPersistenceInstance.deleteAPI(new Organization(organization), apiUuid); log.debug("API " + apiUuid + " on organization " + organization + " has successfully removed from the persistence instance."); } catch (APIPersistenceException e) { log.error("Error while executing API delete operation on persistence instance for API " + apiUuid + " on organization " + organization, e); isError = true; } // Deleting on external API stores if (api != null) { // gatewayType check is required when API Management is deployed on // other servers to avoid synapse //Check if there are already published external APIStores.If yes,removing APIs from them. Set<APIStore> apiStoreSet; try { apiStoreSet = getPublishedExternalAPIStores(apiUuid); WSO2APIPublisher wso2APIPublisher = new WSO2APIPublisher(); if (apiStoreSet != null && !apiStoreSet.isEmpty()) { for (APIStore store : apiStoreSet) { wso2APIPublisher.deleteFromStore(api.getId(), APIUtil.getExternalAPIStore(store.getName(), tenantId)); } } } catch (APIManagementException e) { log.error("Error while executing API delete operation on external API stores for API " + apiUuid + " on organization " + organization, e); isError = true; } } if (apiId != -1) { try { cleanUpPendingAPIStateChangeTask(apiId); } catch (WorkflowException | APIManagementException e) { log.error("Error while executing API delete operation on cleanup workflow tasks for API " + apiUuid + " on organization " + organization, e); isError = true; } } // Delete event publishing to gateways if (api != null && apiId != -1) { APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.API_DELETE.name(), tenantId, tenantDomain, api.getId().getApiName(), apiId, api.getUuid(), api.getId().getVersion(), 
api.getType(), api.getContext(), APIUtil.replaceEmailDomainBack(api.getId().getProviderName()), api.getStatus()); APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name()); } else { log.debug("Event has not published to gateways due to API id has failed to retrieve from DB for API " + apiUuid + " on organization " + organization); } // Logging audit message for API delete if (api != null) { JSONObject apiLogObject = new JSONObject(); apiLogObject.put(APIConstants.AuditLogConstants.NAME, api.getId().getApiName()); apiLogObject.put(APIConstants.AuditLogConstants.VERSION, api.getId().getVersion()); apiLogObject.put(APIConstants.AuditLogConstants.PROVIDER, api.getId().getProviderName()); APIUtil.logAuditMessage(APIConstants.AuditLogConstants.API, apiLogObject.toString(), APIConstants.AuditLogConstants.DELETED, this.username); } // Extracting API details for the recommendation system if (api != null && recommendationEnvironment != null) { RecommenderEventPublisher extractor = new RecommenderDetailsExtractor(api, tenantDomain, APIConstants.DELETE_API); Thread recommendationThread = new Thread(extractor); recommendationThread.start(); } // if one of the above has failed throw an error if (isError) { throw new APIManagementException("Error while deleting the API " + apiUuid + " on organization " + organization); } } /** * Deletes API from the database and delete local scopes and resource scope attachments from KM. 
* * @param api API to delete * @throws APIManagementException if fails to delete the API */ private void deleteAPIFromDB(API api) throws APIManagementException { APIIdentifier apiIdentifier = api.getId(); int tenantId = APIUtil.getTenantId(APIUtil.replaceEmailDomainBack(apiIdentifier.getProviderName())); String tenantDomain = APIUtil.getTenantDomainFromTenantId(tenantId); // Get local scopes for the given API which are not already assigned for different versions of the same API Set<String> localScopeKeysToDelete = apiMgtDAO.getUnversionedLocalScopeKeysForAPI(api.getUuid(), tenantId); // Get the URI Templates for the given API to detach the resources scopes from Set<URITemplate> uriTemplates = apiMgtDAO.getURITemplatesOfAPI(api.getUuid()); // Detach all the resource scopes from the API resources in KM Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain); for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) { KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager(); if (keyManager != null) { try { keyManager.detachResourceScopes(api, uriTemplates); if (log.isDebugEnabled()) { log.debug("Resource scopes are successfully detached for the API : " + apiIdentifier + " from Key Manager :" + keyManagerDtoEntry.getKey() + "."); } // remove the local scopes from the KM for (String localScope : localScopeKeysToDelete) { keyManager.deleteScope(localScope); } if (log.isDebugEnabled()) { log.debug("Local scopes are successfully deleted for the API : " + apiIdentifier + " from Key Manager : " + keyManagerDtoEntry.getKey() + "."); } } catch (APIManagementException e) { log.error("Error while Detach and Delete Scope from Key Manager " + keyManagerDtoEntry.getKey(), e); } } } deleteScopes(localScopeKeysToDelete, tenantId); apiMgtDAO.deleteAPI(api.getUuid()); if (log.isDebugEnabled()) { log.debug("API : " + apiIdentifier + " is successfully deleted from the database and Key Manager."); } } 
/**
 * Removes every revision of the given API, undeploying any deployed revisions first.
 */
private void deleteAPIRevisions(String apiUUID, String organization) throws APIManagementException {
    List<APIRevision> apiRevisionList = apiMgtDAO.getRevisionsListByAPIUUID(apiUUID);
    for (APIRevision apiRevision : apiRevisionList) {
        if (!apiRevision.getApiRevisionDeploymentList().isEmpty()) {
            undeployAPIRevisionDeployment(apiUUID, apiRevision.getRevisionUUID(),
                    apiRevision.getApiRevisionDeploymentList(), organization);
        }
        deleteAPIRevision(apiUUID, apiRevision.getRevisionUUID(), organization);
    }
}

/**
 * Removes every revision of the given API product, undeploying deployed revisions first.
 */
private void deleteAPIProductRevisions(String apiProductUUID, String organization)
        throws APIManagementException {
    List<APIRevision> apiRevisionList = apiMgtDAO.getRevisionsListByAPIUUID(apiProductUUID);
    for (APIRevision apiRevision : apiRevisionList) {
        if (!apiRevision.getApiRevisionDeploymentList().isEmpty()) {
            undeployAPIProductRevisionDeployment(apiProductUUID, apiRevision.getRevisionUUID(),
                    apiRevision.getApiRevisionDeploymentList());
        }
        deleteAPIProductRevision(apiProductUUID, apiRevision.getRevisionUUID(), organization);
    }
}

/**
 * Searches APIs by their documentation content. Delegates to the registry doc search.
 */
public Map<Documentation, API> searchAPIsByDoc(String searchTerm, String searchType)
        throws APIManagementException {
    return searchAPIDoc(registry, tenantId, username, searchTerm);
}

/**
 * Search APIs based on given search term
 *
 * @param searchTerm
 * @param searchType
 * @param providerId
 * @throws APIManagementException
 */
@Deprecated
public List<API> searchAPIs(String searchTerm, String searchType, String providerId)
        throws APIManagementException {
    List<API> foundApiList = new ArrayList<API>();
    // Case-insensitive substring-style match around the trimmed search term.
    String regex = "(?i)[\\w.|-]*" + searchTerm.trim() + "[\\w.|-]*";
    Pattern pattern;
    Matcher matcher;
    // NOTE(review): declared outside the loop, so a null attribute on one API leaves the
    // previous API's value in place for the match — presumably harmless, but verify.
    String apiConstant = null;
    try {
        if (providerId != null) {
            List<API> apiList = getAPIsByProvider(providerId);
            if (apiList == null || apiList.isEmpty()) {
                return apiList;
            }
            pattern = Pattern.compile(regex);
            for (API api : apiList) {
                // Pick the attribute to match against based on the requested search type.
                if ("Name".equalsIgnoreCase(searchType)) {
                    apiConstant = api.getId().getApiName();
                } else if ("Provider".equalsIgnoreCase(searchType)) {
                    apiConstant = api.getId().getProviderName();
                } else if ("Version".equalsIgnoreCase(searchType)) {
                    apiConstant = api.getId().getVersion();
                } else if ("Context".equalsIgnoreCase(searchType)) {
                    apiConstant = api.getContext();
                } else if ("Status".equalsIgnoreCase(searchType)) {
                    apiConstant = api.getStatus();
                } else if (APIConstants.THROTTLE_TIER_DESCRIPTION_ATTRIBUTE.equalsIgnoreCase(searchType)) {
                    apiConstant = api.getDescription();
                }
                if (apiConstant != null) {
                    matcher = pattern.matcher(apiConstant);
                    if (matcher.find()) {
                        foundApiList.add(api);
                    }
                }
            }
        } else {
            // No provider restriction — fall back to the registry-wide search.
            foundApiList = searchAPIs(searchTerm, searchType);
        }
    } catch (APIManagementException e) {
        handleException("Failed to search APIs with type", e);
    }
    Collections.sort(foundApiList, new APINameComparator());
    return foundApiList;
}

/**
 * Search APIs
 *
 * @param searchTerm
 * @param searchType
 * @return
 * @throws APIManagementException
 */
@Deprecated
private List<API> searchAPIs(String searchTerm, String searchType) throws APIManagementException {
    List<API> apiList = new ArrayList<API>();
    Pattern pattern;
    Matcher matcher;
    String searchCriteria = APIConstants.API_OVERVIEW_NAME;
    boolean isTenantFlowStarted = false;
    String userName = this.username;
    try {
        // Enter the tenant flow when operating outside the super tenant.
        if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
            isTenantFlowStarted = true;
            PrivilegedCarbonContext.startTenantFlow();
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        }
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(userName);
        GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY);
        if (artifactManager != null) {
            // Map the friendly search type onto the registry attribute to inspect.
            if ("Name".equalsIgnoreCase(searchType)) {
                searchCriteria = APIConstants.API_OVERVIEW_NAME;
            } else if ("Version".equalsIgnoreCase(searchType)) {
                searchCriteria = APIConstants.API_OVERVIEW_VERSION;
            } else if ("Context".equalsIgnoreCase(searchType)) {
                searchCriteria = APIConstants.API_OVERVIEW_CONTEXT;
            } else if (APIConstants.THROTTLE_TIER_DESCRIPTION_ATTRIBUTE.equalsIgnoreCase(searchType)) {
                searchCriteria = APIConstants.API_OVERVIEW_DESCRIPTION;
            } else if ("Provider".equalsIgnoreCase(searchType)) {
                searchCriteria = APIConstants.API_OVERVIEW_PROVIDER;
                // Provider names are stored with '@' encoded as '-AT-'.
                searchTerm = searchTerm.replaceAll("@", "-AT-");
            } else if ("Status".equalsIgnoreCase(searchType)) {
                searchCriteria = APIConstants.API_OVERVIEW_STATUS;
            }
            String regex = "(?i)[\\w.|-]*" + searchTerm.trim() + "[\\w.|-]*";
            pattern = Pattern.compile(regex);
            if ("Subcontext".equalsIgnoreCase(searchType)) {
                // Subcontext search matches against every URI template of every API.
                List<API> allAPIs = getAllAPIs();
                for (API api : allAPIs) {
                    Set<URITemplate> urls = api.getUriTemplates();
                    for (URITemplate url : urls) {
                        matcher = pattern.matcher(url.getUriTemplate());
                        if (matcher.find()) {
                            apiList.add(api);
                            break;
                        }
                    }
                }
            } else {
                GenericArtifact[] genericArtifacts = artifactManager.getAllGenericArtifacts();
                if (genericArtifacts == null || genericArtifacts.length == 0) {
                    return apiList;
                }
                for (GenericArtifact artifact : genericArtifacts) {
                    String value = artifact.getAttribute(searchCriteria);
                    if (value != null) {
                        matcher = pattern.matcher(value);
                        if (matcher.find()) {
                            API resultAPI = getAPI(artifact);
                            if (resultAPI != null) {
                                apiList.add(resultAPI);
                            }
                        }
                    }
                }
            }
        } else {
            String errorMessage = "Failed to retrieve artifact manager when searching APIs for term "
                    + searchTerm + " in tenant domain " + tenantDomain;
            log.error(errorMessage);
            throw new APIManagementException(errorMessage);
        }
    } catch (RegistryException e) {
        handleException("Failed to search APIs with type", e);
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    return apiList;
}

/**
 * Update the Tier Permissions
 *
 * @param tierName       Tier Name
 * @param permissionType Permission Type
 * @param roles          Roles
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status
 */
public void updateTierPermissions(String tierName, String permissionType,
String roles) throws APIManagementException { apiMgtDAO.updateTierPermissions(tierName, permissionType, roles, tenantId); } @Override public void deleteTierPermissions(String tierName) throws APIManagementException { apiMgtDAO.deleteThrottlingPermissions(tierName, tenantId); } @Override public Set<TierPermissionDTO> getTierPermissions() throws APIManagementException { return apiMgtDAO.getTierPermissions(tenantId); } @Override public TierPermissionDTO getThrottleTierPermission(String tierName) throws APIManagementException { return apiMgtDAO.getThrottleTierPermission(tierName, tenantId); } /** * Update the Tier Permissions * * @param tierName Tier Name * @param permissionType Permission Type * @param roles Roles * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status */ public void updateThrottleTierPermissions(String tierName, String permissionType, String roles) throws APIManagementException { apiMgtDAO.updateThrottleTierPermissions(tierName, permissionType, roles, tenantId); } @Override public Set<TierPermissionDTO> getThrottleTierPermissions() throws APIManagementException { return apiMgtDAO.getThrottleTierPermissions(tenantId); } /** * Publish API to external stores given by external store Ids * * @param api API which need to published * @param externalStoreIds APIStore Ids which need to publish API * @throws APIManagementException If failed to publish to external stores */ @Override public boolean publishToExternalAPIStores(API api, List<String> externalStoreIds) throws APIManagementException { Set<APIStore> inputStores = new HashSet<>(); boolean apiOlderVersionExist = false; APIIdentifier apiIdentifier = api.getId(); for (String store : externalStoreIds) { if (StringUtils.isNotEmpty(store)) { APIStore inputStore = APIUtil.getExternalAPIStore(store, APIUtil.getTenantIdFromTenantDomain(tenantDomain)); if (inputStore == null) { String errorMessage = "Error while publishing to external stores. 
Invalid External Store Id: " + store; log.error(errorMessage); ExceptionCodes exceptionCode = ExceptionCodes.EXTERNAL_STORE_ID_NOT_FOUND; throw new APIManagementException(errorMessage, new ErrorItem(exceptionCode.getErrorMessage(), errorMessage, exceptionCode.getErrorCode(), exceptionCode.getHttpStatusCode())); } inputStores.add(inputStore); } } Set<String> versions = getAPIVersions(apiIdentifier.getProviderName(), apiIdentifier.getName(), api.getOrganization()); APIVersionStringComparator comparator = new APIVersionStringComparator(); for (String tempVersion : versions) { if (comparator.compare(tempVersion, apiIdentifier.getVersion()) < 0) { apiOlderVersionExist = true; break; } } return updateAPIsInExternalAPIStores(api, inputStores, apiOlderVersionExist); } /** * When enabled publishing to external APIStores support,publish the API to external APIStores * * @param api The API which need to published * @param apiStoreSet The APIStores set to which need to publish API * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status */ @Override public void publishToExternalAPIStores(API api, Set<APIStore> apiStoreSet, boolean apiOlderVersionExist) throws APIManagementException { Set<APIStore> publishedStores = new HashSet<APIStore>(); StringBuilder errorStatus = new StringBuilder("Failure to publish to External Stores : "); boolean failure = false; for (APIStore store : apiStoreSet) { org.wso2.carbon.apimgt.api.model.APIPublisher publisher = store.getPublisher(); try { // First trying to publish the API to external APIStore boolean published; String version = ApiMgtDAO.getInstance().getLastPublishedAPIVersionFromAPIStore(api.getId(), store.getName()); if (apiOlderVersionExist && version != null && !(publisher instanceof WSO2APIPublisher)) { published = publisher.createVersionedAPIToStore(api, store, version); publisher.updateToStore(api, store); } else { published = publisher.publishToStore(api, store); } if (published) { // If 
published,then save to database. publishedStores.add(store); } } catch (APIManagementException e) { failure = true; log.error(e); errorStatus.append(store.getDisplayName()).append(','); } } if (!publishedStores.isEmpty()) { addExternalAPIStoresDetails(api.getUuid(), publishedStores); } if (failure) { throw new APIManagementException(errorStatus.substring(0, errorStatus.length() - 2)); } } /** * Update the API to external APIStores and database * * @param api The API which need to published * @param apiStoreSet The APIStores set to which need to publish API * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status */ @Override public boolean updateAPIsInExternalAPIStores(API api, Set<APIStore> apiStoreSet, boolean apiOlderVersionExist) throws APIManagementException { Set<APIStore> publishedStores = getPublishedExternalAPIStores(api.getUuid()); Set<APIStore> notPublishedAPIStores = new HashSet<APIStore>(); Set<APIStore> updateApiStores = new HashSet<APIStore>(); Set<APIStore> removedApiStores = new HashSet<APIStore>(); StringBuilder errorStatus = new StringBuilder("Failed to update External Stores : "); boolean failure = false; if (publishedStores != null) { removedApiStores.addAll(publishedStores); removedApiStores.removeAll(apiStoreSet); } for (APIStore apiStore : apiStoreSet) { boolean publishedToStore = false; if (publishedStores != null) { for (APIStore store : publishedStores) { //If selected external store in edit page is already saved in db if (store.equals(apiStore)) { //Check if there's a modification happened in config file external store definition try { if (!isAPIAvailableInExternalAPIStore(api, apiStore)) { // API is not available continue; } } catch (APIManagementException e) { failure = true; log.error(e); errorStatus.append(store.getDisplayName()).append(','); } publishedToStore = true; //Already the API has published to external APIStore //In this case,the API is already added to external APIStore,thus we 
don't need to publish it again. //We need to update the API in external Store. //Include to update API in external APIStore updateApiStores.add(APIUtil.getExternalAPIStore(store.getName(), tenantId)); } } } if (!publishedToStore) { //If the API has not yet published to selected external APIStore notPublishedAPIStores.add(APIUtil.getExternalAPIStore(apiStore.getName(), tenantId)); } } //Publish API to external APIStore which are not yet published try { publishToExternalAPIStores(api, notPublishedAPIStores, apiOlderVersionExist); } catch (APIManagementException e) { handleException("Failed to publish API to external Store. ", e); } //Update the APIs which are already exist in the external APIStore updateAPIInExternalAPIStores(api, updateApiStores); //Update database saved published APIStore details updateExternalAPIStoresDetails(api.getUuid(), updateApiStores); deleteFromExternalAPIStores(api, removedApiStores); if (failure) { throw new APIManagementException(errorStatus.substring(0, errorStatus.length() - 2)); } return true; } private void deleteFromExternalAPIStores(API api, Set<APIStore> removedApiStores) throws APIManagementException { Set<APIStore> removalCompletedStores = new HashSet<APIStore>(); StringBuilder errorStatus = new StringBuilder("Failed to delete from External Stores : "); boolean failure = false; for (APIStore store : removedApiStores) { org.wso2.carbon.apimgt.api.model.APIPublisher publisher = APIUtil.getExternalAPIStore(store.getName(), tenantId).getPublisher(); try { boolean deleted = publisher.deleteFromStore( api.getId(), APIUtil.getExternalAPIStore(store.getName(), tenantId)); if (deleted) { // If the attempt is successful, database will be // changed deleting the External store mappings. 
removalCompletedStores.add(store); } } catch (APIManagementException e) { failure = true; log.error(e); errorStatus.append(store.getDisplayName()).append(','); } } if (!removalCompletedStores.isEmpty()) { removeExternalAPIStoreDetails(api.getUuid(), removalCompletedStores); } if (failure) { throw new APIManagementException(errorStatus.substring(0, errorStatus.length() - 2)); } } private void removeExternalAPIStoreDetails(String id, Set<APIStore> removalCompletedStores) throws APIManagementException { apiMgtDAO.deleteExternalAPIStoresDetails(id, removalCompletedStores); } private boolean isAPIAvailableInExternalAPIStore(API api, APIStore store) throws APIManagementException { org.wso2.carbon.apimgt.api.model.APIPublisher publisher = store.getPublisher(); return publisher.isAPIAvailable(api, store); } /** * When enabled publishing to external APIStores support,updating the API existing in external APIStores * * @param api The API which need to published * @param apiStoreSet The APIStores set to which need to publish API * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status */ private void updateAPIInExternalAPIStores(API api, Set<APIStore> apiStoreSet) throws APIManagementException { if (apiStoreSet != null && !apiStoreSet.isEmpty()) { StringBuilder errorStatus = new StringBuilder("Failed to update External Stores : "); boolean failure = false; for (APIStore store : apiStoreSet) { try { org.wso2.carbon.apimgt.api.model.APIPublisher publisher = store.getPublisher(); publisher.updateToStore(api, store); } catch (APIManagementException e) { failure = true; log.error(e); errorStatus.append(store.getDisplayName()).append(','); } } if (failure) { throw new APIManagementException(errorStatus.substring(0, errorStatus.length() - 2)); } } } /** * When enabled publishing to external APIStores support,update external apistores data in db * * @param apiId The API Identifier which need to update in db * @param apiStoreSet The APIStores 
set which need to update in db
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status
 */
private void updateExternalAPIStoresDetails(String apiId, Set<APIStore> apiStoreSet)
        throws APIManagementException {
    // Thin delegation: persistence (and any merging with existing rows) happens in the DAO.
    apiMgtDAO.updateExternalAPIStoresDetails(apiId, apiStoreSet);
}

/**
 * Persists the given external API store records for the API.
 *
 * @param apiId       API uuid
 * @param apiStoreSet external stores to record for this API
 * @return true when the DAO reports the insert succeeded
 * @throws APIManagementException on persistence failure
 */
private boolean addExternalAPIStoresDetails(String apiId, Set<APIStore> apiStoreSet)
        throws APIManagementException {
    return apiMgtDAO.addExternalAPIStoresDetails(apiId, apiStoreSet);
}

/**
 * When enabled publishing to external APIStores support,get all the external apistore details which are
 * published and stored in db and which are not unpublished
 *
 * @param apiId The API uuid which need to update in db
 * @return stores the API is published to, or null when external-store publishing is disabled for the tenant
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status
 */
@Override
public Set<APIStore> getExternalAPIStores(String apiId)
        throws APIManagementException {
    if (APIUtil.isAPIsPublishToExternalAPIStores(tenantId)) {
        // Sort by store name so callers get a stable, display-ready ordering.
        SortedSet<APIStore> sortedApiStores = new TreeSet<APIStore>(new APIStoreNameComparator());
        Set<APIStore> publishedStores = apiMgtDAO.getExternalAPIStoresDetails(apiId);
        sortedApiStores.addAll(publishedStores);
        return APIUtil.getExternalAPIStores(sortedApiStores, tenantId);
    } else {
        // NOTE(review): returns null (not an empty set) when the feature is off — callers must null-check.
        return null;
    }
}

/**
 * When enabled publishing to external APIStores support,get only the published external apistore details which are
 * stored in db
 *
 * @param apiId The API uuid which need to update in db
 * @return published stores intersected with the currently configured stores, or null when the feature is disabled
 * @throws org.wso2.carbon.apimgt.api.APIManagementException If failed to update subscription status
 */
@Override
public Set<APIStore> getPublishedExternalAPIStores(String apiId)
        throws APIManagementException {
    Set<APIStore> storesSet;
    SortedSet<APIStore> configuredAPIStores = new TreeSet<>(new APIStoreNameComparator());
    configuredAPIStores.addAll(APIUtil.getExternalStores(tenantId));
    if (APIUtil.isAPIsPublishToExternalAPIStores(tenantId)) {
        storesSet = apiMgtDAO.getExternalAPIStoresDetails(apiId);
        //Retains only the stores that contained in configuration
        storesSet.retainAll(configuredAPIStores);
        return storesSet;
    }
    return null;
}

/**
 * Get stored custom inSequences from governanceSystem registry
 *
 * @param apiIdentifier identifier whose provider determines the tenant registry to read from
 * @return names of globally defined and API-specific "in" sequences (duplicates possible)
 * @throws APIManagementException
 */
public List<String> getCustomInSequences(APIIdentifier apiIdentifier) throws APIManagementException {

    List<String> sequenceList = new ArrayList<String>();
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = null;
        // Provider names are stored with '@' encoded as '-AT-'; decode before deriving the tenant domain.
        if (apiIdentifier.getProviderName().contains("-AT-")) {
            String provider = apiIdentifier.getProviderName().replace("-AT-", "@");
            tenantDomain = MultitenantUtils.getTenantDomain(provider);
        }
        PrivilegedCarbonContext.startTenantFlow();
        isTenantFlowStarted = true;
        if (!StringUtils.isEmpty(tenantDomain)) {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        } else {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain
                    (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
        }
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService().getGovernanceSystemRegistry(tenantId);
        // First pass: sequences defined at the global custom-insequence location.
        if (registry.resourceExists(APIConstants.API_CUSTOM_INSEQUENCE_LOCATION)) {
            org.wso2.carbon.registry.api.Collection inSeqCollection =
                    (org.wso2.carbon.registry.api.Collection) registry.get(APIConstants.API_CUSTOM_INSEQUENCE_LOCATION);
            if (inSeqCollection != null) {
                String[] inSeqChildPaths = inSeqCollection.getChildren();
                Arrays.sort(inSeqChildPaths);
                for (String inSeqChildPath : inSeqChildPaths) {
                    Resource inSequence = registry.get(inSeqChildPath);
                    try {
                        // Sequence name is taken from the 'name' attribute of the parsed sequence XML.
                        OMElement seqElment = APIUtil.buildOMElement(inSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        // A single malformed sequence file must not hide the remaining ones.
                        log.info("Error occurred when reading the sequence '" + inSeqChildPath +
                                "' from the registry.", e);
                    }
                }
            }
        }
        // Second pass: sequences stored under this specific API's path.
        String customInSeqFileLocation = APIUtil.getSequencePath(apiIdentifier, "in");
        if (registry.resourceExists(customInSeqFileLocation)) {
            org.wso2.carbon.registry.api.Collection inSeqCollection =
                    (org.wso2.carbon.registry.api.Collection) registry.get(customInSeqFileLocation);
            if (inSeqCollection != null) {
                String[] inSeqChildPaths = inSeqCollection.getChildren();
                Arrays.sort(inSeqChildPaths);
                for (String inSeqChildPath : inSeqChildPaths) {
                    Resource inSequence = registry.get(inSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(inSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + inSeqChildPath +
                                "' from the registry.", e);
                    }
                }
            }
        }
    } catch (Exception e) {
        handleException("Issue is in getting custom InSequences from the Registry", e);
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    return sequenceList;
}

/**
 * Get stored custom outSequences from governanceSystem registry
 *
 * @param apiIdentifier identifier whose provider determines the tenant registry to read from
 * @return names of globally defined and API-specific "out" sequences (duplicates possible)
 * @throws APIManagementException
 */
public List<String> getCustomOutSequences(APIIdentifier apiIdentifier) throws APIManagementException {

    List<String> sequenceList = new ArrayList<String>();
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = null;
        // Provider names are stored with '@' encoded as '-AT-'; decode before deriving the tenant domain.
        if (apiIdentifier.getProviderName().contains("-AT-")) {
            String provider = apiIdentifier.getProviderName().replace("-AT-", "@");
            tenantDomain = MultitenantUtils.getTenantDomain(provider);
        }
        PrivilegedCarbonContext.startTenantFlow();
        isTenantFlowStarted = true;
        if (!StringUtils.isEmpty(tenantDomain)) {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        } else {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain
                    (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
        }
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        // First pass: sequences defined at the global custom-outsequence location.
        if (registry.resourceExists(APIConstants.API_CUSTOM_OUTSEQUENCE_LOCATION)) {
            org.wso2.carbon.registry.api.Collection outSeqCollection =
                    (org.wso2.carbon.registry.api.Collection) registry.get(APIConstants.API_CUSTOM_OUTSEQUENCE_LOCATION);
            if (outSeqCollection != null) {
                String[] outSeqChildPaths = outSeqCollection.getChildren();
                Arrays.sort(outSeqChildPaths);
                for (String childPath : outSeqChildPaths) {
                    Resource outSequence = registry.get(childPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + childPath +
                                "' from the registry.", e);
                    }
                }
            }
        }
        // Second pass: sequences stored under this specific API's path.
        String customOutSeqFileLocation = APIUtil.getSequencePath(apiIdentifier, "out");
        if (registry.resourceExists(customOutSeqFileLocation)) {
            org.wso2.carbon.registry.api.Collection outSeqCollection =
                    (org.wso2.carbon.registry.api.Collection) registry.get(customOutSeqFileLocation);
            if (outSeqCollection != null) {
                String[] outSeqChildPaths = outSeqCollection.getChildren();
                Arrays.sort(outSeqChildPaths);
                for (String outSeqChildPath : outSeqChildPaths) {
                    Resource outSequence = registry.get(outSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + outSeqChildPath +
                                "' from the registry.", e);
                    }
                }
            }
        }
    } catch (Exception e) {
        handleException("Issue is in getting custom OutSequences from the Registry", e);
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    return sequenceList;
}

/**
 * Get the list of Custom InSequences including API defined in sequences.
* * @return List of available sequences * @throws APIManagementException */ public List<String> getCustomInSequences() throws APIManagementException { Set<String> sequenceList = new TreeSet<>(); try { UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); if (registry.resourceExists(APIConstants.API_CUSTOM_INSEQUENCE_LOCATION)) { org.wso2.carbon.registry.api.Collection inSeqCollection = (org.wso2.carbon.registry.api.Collection) registry.get(APIConstants.API_CUSTOM_INSEQUENCE_LOCATION); if (inSeqCollection != null) { String[] inSeqChildPaths = inSeqCollection.getChildren(); Arrays.sort(inSeqChildPaths); for (String inSeqChildPath : inSeqChildPaths) { Resource inSequence = registry.get(inSeqChildPath); try { OMElement seqElment = APIUtil.buildOMElement(inSequence.getContentStream()); sequenceList.add(seqElment.getAttributeValue(new QName("name"))); } catch (OMException e) { log.info("Error occurred when reading the sequence '" + inSeqChildPath + "' from the registry.", e); } } } } } catch (RegistryException e) { String msg = "Error while retrieving registry for tenant " + tenantId; log.error(msg); throw new APIManagementException(msg, e); } catch (org.wso2.carbon.registry.api.RegistryException e) { String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN + " in the registry"; log.error(msg); throw new APIManagementException(msg, e); } catch (Exception e) { log.error(e.getMessage()); throw new APIManagementException(e.getMessage(), e); } return new ArrayList<>(sequenceList); } /** * Get the list of Custom InSequences including API defined in sequences. 
 *
 * @return List of available sequences
 * @throws APIManagementException
 */
public List<String> getCustomOutSequences() throws APIManagementException {

    // TreeSet keeps names sorted and de-duplicated before they are returned as a list.
    Set<String> sequenceList = new TreeSet<>();
    try {
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        if (registry.resourceExists(APIConstants.API_CUSTOM_OUTSEQUENCE_LOCATION)) {
            org.wso2.carbon.registry.api.Collection outSeqCollection =
                    (org.wso2.carbon.registry.api.Collection) registry.get(APIConstants.API_CUSTOM_OUTSEQUENCE_LOCATION);
            if (outSeqCollection != null) {
                String[] outSeqChildPaths = outSeqCollection.getChildren();
                Arrays.sort(outSeqChildPaths);
                for (String outSeqChildPath : outSeqChildPaths) {
                    Resource outSequence = registry.get(outSeqChildPath);
                    try {
                        // Sequence name is taken from the 'name' attribute of the parsed sequence XML.
                        OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        // A single malformed sequence file must not hide the remaining ones.
                        log.info("Error occurred when reading the sequence '" + outSeqChildPath +
                                "' from the registry.", e);
                    }
                }
            }
        }
    } catch (RegistryException e) {
        String msg = "Error while retrieving registry for tenant " + tenantId;
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (org.wso2.carbon.registry.api.RegistryException e) {
        String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT + " in the registry";
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (Exception e) {
        // NOTE(review): broad catch rethrows with only the message; stack trace is preserved via the cause.
        log.error(e.getMessage());
        throw new APIManagementException(e.getMessage(), e);
    }
    return new ArrayList<>(sequenceList);
}

/**
 * Get stored custom fault sequences from governanceSystem registry
 *
 * @return sorted, de-duplicated names of globally defined fault sequences
 * @throws APIManagementException
 */
@Deprecated
public List<String> getCustomFaultSequences() throws APIManagementException {

    Set<String> sequenceList = new TreeSet<>();
    try {
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        if (registry.resourceExists(APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION)) {
            org.wso2.carbon.registry.api.Collection faultSeqCollection =
                    (org.wso2.carbon.registry.api.Collection) registry.get(APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION);
            if (faultSeqCollection != null) {
                String[] faultSeqChildPaths = faultSeqCollection.getChildren();
                Arrays.sort(faultSeqChildPaths);
                for (String faultSeqChildPath : faultSeqChildPaths) {
                    Resource outSequence = registry.get(faultSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + faultSeqChildPath +
                                "' from the registry.", e);
                    }
                }
            }
        }
    } catch (RegistryException e) {
        String msg = "Error while retrieving registry for tenant " + tenantId;
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (org.wso2.carbon.registry.api.RegistryException e) {
        String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT + " in the registry";
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (Exception e) {
        log.error(e.getMessage());
        throw new APIManagementException(e.getMessage(), e);
    }
    return new ArrayList<>(sequenceList);
}

/**
 * Get stored custom fault sequences from governanceSystem registry
 *
 * @param apiIdentifier identifier whose provider determines the tenant registry to read from
 * @return names of globally defined and API-specific fault sequences (duplicates possible)
 * @throws APIManagementException
 */
public List<String> getCustomFaultSequences(APIIdentifier apiIdentifier) throws APIManagementException {

    List<String> sequenceList = new ArrayList<String>();
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = null;
        // Provider names are stored with '@' encoded as '-AT-'; decode before deriving the tenant domain.
        if (apiIdentifier.getProviderName().contains("-AT-")) {
            String provider = apiIdentifier.getProviderName().replace("-AT-", "@");
            tenantDomain = MultitenantUtils.getTenantDomain(provider);
        }
        PrivilegedCarbonContext.startTenantFlow();
        isTenantFlowStarted = true;
        if (!StringUtils.isEmpty(tenantDomain)) {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        } else {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain
                    (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
        }
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        // First pass: sequences defined at the global custom-faultsequence location.
        if (registry.resourceExists(APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION)) {
            org.wso2.carbon.registry.api.Collection faultSeqCollection =
                    (org.wso2.carbon.registry.api.Collection) registry.get(
                            APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION);
            if (faultSeqCollection != null) {
                String[] faultSeqChildPaths = faultSeqCollection.getChildren();
                Arrays.sort(faultSeqChildPaths);
                for (String faultSeqChildPath : faultSeqChildPaths) {
                    Resource outSequence = registry.get(faultSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + faultSeqChildPath +
                                "' from the registry.", e);
                    }
                }
            }
        }
        // Second pass: fault sequences stored under this specific API's path.
        String customOutSeqFileLocation = APIUtil.getSequencePath(apiIdentifier,
                APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT);
        if (registry.resourceExists(customOutSeqFileLocation)) {
            org.wso2.carbon.registry.api.Collection faultSeqCollection =
                    (org.wso2.carbon.registry.api.Collection) registry.get(customOutSeqFileLocation);
            if (faultSeqCollection != null) {
                String[] faultSeqChildPaths = faultSeqCollection.getChildren();
                Arrays.sort(faultSeqChildPaths);
                for (String faultSeqChildPath : faultSeqChildPaths) {
                    Resource faultSequence = registry.get(faultSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(faultSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + faultSeqChildPath +
                                "' from the registry.", e);
                    }
                }
            }
        }
    } catch (RegistryException e) {
        String msg = "Error while retrieving registry for tenant " + tenantId;
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (org.wso2.carbon.registry.api.RegistryException e) {
        String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT + " sequences of " +
                apiIdentifier + " in the registry";
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (Exception e) {
        log.error(e.getMessage());
        throw new APIManagementException(e.getMessage(), e);
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    return sequenceList;
}

/**
 * Get the list of Custom in sequences of API.
 *
 * @param apiIdentifier identifier whose provider determines the tenant registry to read from
 * @return List of in sequences
 * @throws APIManagementException
 */
public List<String> getCustomApiInSequences(APIIdentifier apiIdentifier) throws APIManagementException {

    Set<String> sequenceList = new TreeSet<>();
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = null;
        if (apiIdentifier.getProviderName().contains("-AT-")) {
            String provider = apiIdentifier.getProviderName().replace("-AT-", "@");
            tenantDomain = MultitenantUtils.getTenantDomain(provider);
        }
        PrivilegedCarbonContext.startTenantFlow();
        isTenantFlowStarted = true;
        if (!StringUtils.isEmpty(tenantDomain)) {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        } else {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain
                    (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
        }
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        // Unlike getCustomInSequences(APIIdentifier), only the API-specific path is consulted here.
        String customInSeqFileLocation = APIUtil
                .getSequencePath(apiIdentifier, APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN);
        if (registry.resourceExists(customInSeqFileLocation)) {
            org.wso2.carbon.registry.api.Collection inSeqCollection = (org.wso2.carbon.registry.api.Collection) registry
                    .get(customInSeqFileLocation);
            if (inSeqCollection != null) {
                String[] inSeqChildPaths = inSeqCollection.getChildren();
                Arrays.sort(inSeqChildPaths);
                for (String inSeqChildPath : inSeqChildPaths) {
                    Resource outSequence = registry.get(inSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + inSeqChildPath +
                                "' from the registry.", e);
                    }
                }
            }
        }
    } catch (RegistryException e) {
        String msg = "Error while retrieving registry for tenant " + tenantId;
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (org.wso2.carbon.registry.api.RegistryException e) {
        String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN + " sequences of " +
                apiIdentifier + " in the registry";
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (Exception e) {
        log.error(e.getMessage());
        throw new APIManagementException(e.getMessage(), e);
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    return new ArrayList<>(sequenceList);
}

/**
 * Get the list of Custom out Sequences of API
 *
 * @param apiIdentifier identifier whose provider determines the tenant registry to read from
 * @return List of available out sequences
 * @throws APIManagementException
 */
public List<String> getCustomApiOutSequences(APIIdentifier apiIdentifier) throws APIManagementException {

    Set<String> sequenceList = new TreeSet<>();
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = null;
        if (apiIdentifier.getProviderName().contains("-AT-")) {
            String provider = apiIdentifier.getProviderName().replace("-AT-", "@");
            tenantDomain = MultitenantUtils.getTenantDomain(provider);
        }
        PrivilegedCarbonContext.startTenantFlow();
        isTenantFlowStarted = true;
        if (!StringUtils.isEmpty(tenantDomain)) {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        } else {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain
                    (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
        }
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        // Only the API-specific "out" sequence path is consulted here.
        String customOutSeqFileLocation = APIUtil.getSequencePath(apiIdentifier,
                APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT);
        if (registry.resourceExists(customOutSeqFileLocation)) {
            org.wso2.carbon.registry.api.Collection outSeqCollection =
                    (org.wso2.carbon.registry.api.Collection) registry.get(customOutSeqFileLocation);
            if (outSeqCollection != null) {
                String[] outSeqChildPaths = outSeqCollection.getChildren();
                Arrays.sort(outSeqChildPaths);
                for (String outSeqChildPath : outSeqChildPaths) {
                    Resource outSequence = registry.get(outSeqChildPath);
                    try {
                        OMElement seqElment = APIUtil.buildOMElement(outSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + outSeqChildPath +
                                "' from the registry.", e);
                    }
                }
            }
        }
    } catch (RegistryException e) {
        String msg = "Error while retrieving registry for tenant " + tenantId;
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (org.wso2.carbon.registry.api.RegistryException e) {
        String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT + " sequences of " +
                apiIdentifier + " in the registry";
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (Exception e) {
        log.error(e.getMessage());
        throw new APIManagementException(e.getMessage(), e);
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    return new ArrayList<>(sequenceList);
}

/**
 * Get the list of Custom Fault Sequences of API.
 *
 * @return List of available fault sequences
 * @throws APIManagementException
 */
public List<String> getCustomApiFaultSequences(APIIdentifier apiIdentifier) throws APIManagementException {

    Set<String> sequenceList = new TreeSet<>();
    boolean isTenantFlowStarted = false;
    try {
        String tenantDomain = null;
        // Provider names are stored with '@' encoded as '-AT-'; decode before deriving the tenant domain.
        if (apiIdentifier.getProviderName().contains("-AT-")) {
            String provider = apiIdentifier.getProviderName().replace("-AT-", "@");
            tenantDomain = MultitenantUtils.getTenantDomain(provider);
        }
        PrivilegedCarbonContext.startTenantFlow();
        isTenantFlowStarted = true;
        if (!StringUtils.isEmpty(tenantDomain)) {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        } else {
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain
                    (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true);
        }
        UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService()
                .getGovernanceSystemRegistry(tenantId);
        // Only the API-specific fault sequence path is consulted here.
        String customOutSeqFileLocation = APIUtil.getSequencePath(apiIdentifier,
                APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT);
        if (registry.resourceExists(customOutSeqFileLocation)) {
            org.wso2.carbon.registry.api.Collection faultSeqCollection = (org.wso2.carbon.registry.api.Collection) registry
                    .get(customOutSeqFileLocation);
            if (faultSeqCollection != null) {
                String[] faultSeqChildPaths = faultSeqCollection.getChildren();
                Arrays.sort(faultSeqChildPaths);
                for (String faultSeqChildPath : faultSeqChildPaths) {
                    Resource faultSequence = registry.get(faultSeqChildPath);
                    try {
                        // Sequence name is taken from the 'name' attribute of the parsed sequence XML.
                        OMElement seqElment = APIUtil.buildOMElement(faultSequence.getContentStream());
                        sequenceList.add(seqElment.getAttributeValue(new QName("name")));
                    } catch (OMException e) {
                        log.info("Error occurred when reading the sequence '" + faultSeqChildPath +
                                "' from the registry.", e);
                    }
                }
            }
        }
    } catch (RegistryException e) {
        String msg = "Error while retrieving registry for tenant " + tenantId;
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (org.wso2.carbon.registry.api.RegistryException e) {
        String msg = "Error while processing the " + APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT + " sequences of " +
                apiIdentifier + " in the registry";
        log.error(msg);
        throw new APIManagementException(msg, e);
    } catch (Exception e) {
        log.error(e.getMessage());
        throw new APIManagementException(e.getMessage(), e);
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
    return new ArrayList<>(sequenceList);
}

// Returns true when the configured gateway type is Synapse (case-insensitive match on config value).
@Override
public boolean isSynapseGateway() throws APIManagementException {
    APIManagerConfiguration config = getAPIManagerConfiguration();
    String gatewayType = config.getFirstProperty(APIConstants.API_GATEWAY_TYPE);
    return APIConstants.API_GATEWAY_TYPE_SYNAPSE.equalsIgnoreCase(gatewayType);
}

// Validates resource-level x-throttling tiers using the API object's URI templates.
@Override
public void validateResourceThrottlingTiers(API api, String tenantDomain) throws APIManagementException {
    if (log.isDebugEnabled()) {
        log.debug("Validating x-throttling tiers defined in swagger api definition resource");
    }
    Set<URITemplate> uriTemplates = api.getUriTemplates();
    checkResourceThrottlingTiersInURITemplates(uriTemplates, tenantDomain);
}

// Validates resource-level x-throttling tiers by parsing the raw swagger definition.
@Override
public void validateResourceThrottlingTiers(String swaggerContent, String tenantDomain)
        throws APIManagementException {
    if (log.isDebugEnabled()) {
        log.debug("Validating x-throttling tiers defined in swagger api definition resource");
    }
    APIDefinition apiDefinition = OASParserUtil.getOASParser(swaggerContent);
    Set<URITemplate> uriTemplates = apiDefinition.getURITemplates(swaggerContent);
    checkResourceThrottlingTiersInURITemplates(uriTemplates, tenantDomain);
}

// Rejects an API whose API-level policy does not match any configured throttling tier.
@Override
public void validateAPIThrottlingTier(API api, String tenantDomain) throws APIManagementException {
    if (log.isDebugEnabled()) {
        log.debug("Validating apiLevelPolicy defined in the API");
    }
    Map<String, Tier> tierMap = APIUtil.getTiers(APIConstants.TIER_RESOURCE_TYPE, tenantDomain);
    if (tierMap != null) {
        String apiLevelPolicy = api.getApiLevelPolicy();
        if (apiLevelPolicy != null && !tierMap.containsKey(apiLevelPolicy)) {
            String message = "Invalid API level throttling tier " + apiLevelPolicy + " found in api definition";
            throw new APIManagementException(message);
        }
    }
}

// Rejects an API product whose product-level policy does not match any configured throttling tier.
@Override
public void validateProductThrottlingTier(APIProduct apiProduct, String tenantDomain)
        throws APIManagementException {
    if (log.isDebugEnabled()) {
        log.debug("Validating productLevelPolicy defined in the API Product");
    }
    Map<String, Tier> tierMap = APIUtil.getTiers(APIConstants.TIER_RESOURCE_TYPE, tenantDomain);
    if (tierMap != null) {
        String apiLevelPolicy = apiProduct.getProductLevelPolicy();
        if (apiLevelPolicy != null && !tierMap.containsKey(apiLevelPolicy)) {
            String message = "Invalid Product level throttling tier " + apiLevelPolicy + " found in api definition";
            throw new APIManagementException(message);
        }
    }
}

// Shared check: every URI template's throttling tier must exist in the tenant's tier map.
private void checkResourceThrottlingTiersInURITemplates(Set<URITemplate> uriTemplates, String tenantDomain)
        throws APIManagementException {
    Map<String, Tier> tierMap = APIUtil.getTiers(APIConstants.TIER_RESOURCE_TYPE, tenantDomain);
    if (tierMap != null) {
        for (URITemplate template : uriTemplates) {
            if (template.getThrottlingTier() != null && !tierMap.containsKey(template.getThrottlingTier())) {
                String message = "Invalid x-throttling tier " + template.getThrottlingTier() +
                        " found in api definition for resource " + template.getHTTPVerb() + " " +
                        template.getUriTemplate();
                log.error(message);
                throw new APIManagementException(message);
            }
        }
    }
}

// Resolves the API uuid (falling back to a DAO lookup) and stores the swagger definition.
@Override
public void saveSwagger20Definition(APIIdentifier apiId, String jsonText, String organization)
        throws APIManagementException {

    String uuid;
    if (apiId.getUUID() != null) {
        uuid = apiId.getUUID();
    } else {
        uuid = apiMgtDAO.getUUIDFromIdentifier(apiId.getProviderName(), apiId.getApiName(), apiId.getVersion(),
                organization);
    }
    saveSwaggerDefinition(uuid, jsonText, organization);
}

// Prefers the API object's uuid, then the identifier's, then a DAO lookup, before persisting.
@Override
public void saveSwaggerDefinition(API api, String jsonText, String organization) throws APIManagementException {

    String apiId;
    if (api.getUuid() != null) {
        apiId = api.getUuid();
    } else if (api.getId().getUUID() != null) {
        apiId = api.getId().getUUID();
    } else {
        apiId = apiMgtDAO.getUUIDFromIdentifier(api.getId().getProviderName(), api.getId().getApiName(),
                api.getId().getVersion(), organization);
    }
    saveSwaggerDefinition(apiId, jsonText, organization);
}

// Persists the OAS definition via the persistence layer, wrapping persistence failures.
@Override
public void saveSwaggerDefinition(String apiId, String jsonText, String organization)
        throws APIManagementException {
    try {
        apiPersistenceInstance.saveOASDefinition(new Organization(organization), apiId, jsonText);
    } catch (OASPersistenceException e) {
        throw new APIManagementException("Error while persisting OAS definition ", e);
    }
}

// Stores the GraphQL schema in the registry within this provider's tenant flow.
@Override
public void saveGraphqlSchemaDefinition(API api, String schemaDefinition) throws APIManagementException {
    try {
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        GraphQLSchemaDefinition schemaDef = new GraphQLSchemaDefinition();
        schemaDef.saveGraphQLSchemaDefinition(api, schemaDefinition, registry);
    } finally {
        PrivilegedCarbonContext.endTenantFlow();
    }
}

// Loads the product by identifier and stores its swagger definition in the registry.
@Override
public void saveSwagger20Definition(APIProductIdentifier apiId, String jsonText) throws APIManagementException {
    try {
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        saveAPIDefinition(getAPIProduct(apiId), jsonText, registry);
    } finally {
        PrivilegedCarbonContext.endTenantFlow();
    }
}

// Stores the given product's swagger definition in the registry within the tenant flow.
@Override
public void saveSwaggerDefinition(APIProduct apiProduct, String jsonText) throws APIManagementException {
    try {
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        saveAPIDefinition(apiProduct, jsonText, registry);
    } finally {
        PrivilegedCarbonContext.endTenantFlow();
    }
}

/**
 * Writes an API product's OAS definition to its registry path and (re)applies resource permissions.
 *
 * @param apiProduct        product whose definition is being stored
 * @param apiDefinitionJSON the OAS JSON payload
 * @param registry          registry to write into (must be a UserRegistry; cast below relies on this)
 * @throws APIManagementException wrapping registry failures
 */
private void saveAPIDefinition(APIProduct apiProduct, String apiDefinitionJSON,
                               org.wso2.carbon.registry.api.Registry registry) throws APIManagementException {

    String apiName = apiProduct.getId().getName();
    String apiVersion = apiProduct.getId().getVersion();
    String apiProviderName = apiProduct.getId().getProviderName();
    try {
        String resourcePath = APIUtil.getAPIProductOpenAPIDefinitionFilePath(apiName, apiVersion, apiProviderName);
        resourcePath = resourcePath + APIConstants.API_OAS_DEFINITION_RESOURCE_NAME;
        org.wso2.carbon.registry.api.Resource resource;
        if (!registry.resourceExists(resourcePath)) {
            resource = registry.newResource();
        } else {
            resource = registry.get(resourcePath);
        }
        resource.setContent(apiDefinitionJSON);
        resource.setMediaType("application/json");
        registry.put(resourcePath, resource);
        String[] visibleRoles = null;
        if (apiProduct.getVisibleRoles() != null) {
            visibleRoles = apiProduct.getVisibleRoles().split(",");
        }
        //Need to set anonymous if the visibility is public
        APIUtil.clearResourcePermissions(resourcePath, apiProduct.getId(),
                ((UserRegistry) registry).getTenantId());
        APIUtil.setResourcePermissions(apiProviderName, apiProduct.getVisibility(), visibleRoles, resourcePath);
    } catch (org.wso2.carbon.registry.api.RegistryException e) {
        handleException("Error while adding Swagger Definition for " + apiName + '-' + apiVersion, e);
    }
}

// Generates a fresh OAS3 definition for the product, merges in resource operations, and persists it.
@Override
public void addAPIProductSwagger(String productId, Map<API, List<APIProductResource>> apiToProductResourceMapping,
                                 APIProduct apiProduct, String orgId) throws APIManagementException {
    APIDefinition parser = new OAS3Parser();
    SwaggerData swaggerData = new SwaggerData(apiProduct);
    String apiProductSwagger = parser.generateAPIDefinition(swaggerData);
    apiProductSwagger = OASParserUtil.updateAPIProductSwaggerOperations(apiToProductResourceMapping,
            apiProductSwagger);
    saveSwaggerDefinition(productId, apiProductSwagger, orgId);
    apiProduct.setDefinition(apiProductSwagger);
}

// Regenerates the product definition on top of the existing swagger and persists the merged result.
@Override
public void updateAPIProductSwagger(String productId, Map<API, List<APIProductResource>> apiToProductResourceMapping,
                                    APIProduct apiProduct, String orgId) throws APIManagementException {
    APIDefinition parser = new OAS3Parser();
    SwaggerData updatedData = new SwaggerData(apiProduct);
    String existingProductSwagger = getAPIDefinitionOfAPIProduct(apiProduct);
    String updatedProductSwagger = parser.generateAPIDefinition(updatedData, existingProductSwagger);
    updatedProductSwagger = OASParserUtil.updateAPIProductSwaggerOperations(apiToProductResourceMapping,
            updatedProductSwagger);
    saveSwaggerDefinition(productId, updatedProductSwagger, orgId);
    apiProduct.setDefinition(updatedProductSwagger);
}

/**
 * Executes a lifecycle state change for the API identified by the registry artifact, going through the
 * API-state workflow executor first; the registry transition only happens if the workflow is approved
 * (or no workflow state is recorded, as with the simple executor).
 *
 * @param apiIdentifier API to transition
 * @param action        lifecycle action name (e.g. Publish)
 * @param organization  organization used to resolve the API uuid
 * @return response carrying the workflow response and resulting state-change status
 * @throws APIManagementException on workflow/registry failures with a non-gateway cause
 * @throws FaultGatewaysException when the underlying executor reports failed gateway environments
 */
public APIStateChangeResponse changeLifeCycleStatus(APIIdentifier apiIdentifier, String action, String organization)
        throws APIManagementException, FaultGatewaysException {
    APIStateChangeResponse response = new APIStateChangeResponse();
    try {
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username);
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(this.tenantDomain, true);

        GenericArtifact apiArtifact = getAPIArtifact(apiIdentifier);
        String targetStatus;
        if (apiArtifact != null) {

            String providerName = apiArtifact.getAttribute(APIConstants.API_OVERVIEW_PROVIDER);
            String apiName = apiArtifact.getAttribute(APIConstants.API_OVERVIEW_NAME);
            String apiContext = apiArtifact.getAttribute(APIConstants.API_OVERVIEW_CONTEXT);
            String apiType = apiArtifact.getAttribute(APIConstants.API_OVERVIEW_TYPE);
            String apiVersion = apiArtifact.getAttribute(APIConstants.API_OVERVIEW_VERSION);
            String currentStatus = apiArtifact.getLifecycleState();
            String uuid = apiMgtDAO.getUUIDFromIdentifier(apiIdentifier, organization);
            int apiId = apiMgtDAO.getAPIID(uuid);

            WorkflowStatus apiWFState = null;
            WorkflowDTO wfDTO = apiMgtDAO.retrieveWorkflowFromInternalReference(Integer.toString(apiId),
                    WorkflowConstants.WF_TYPE_AM_API_STATE);
            if (wfDTO != null) {
                apiWFState = wfDTO.getStatus();
            }

            // if the workflow has started, then executor should not fire again
            if (!WorkflowStatus.CREATED.equals(apiWFState)) {

                try {
                    WorkflowProperties workflowProperties = getAPIManagerConfiguration().getWorkflowProperties();
                    WorkflowExecutor apiStateWFExecutor = WorkflowExecutorFactory.getInstance()
                            .getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_API_STATE);
                    APIStateWorkflowDTO apiStateWorkflow = new APIStateWorkflowDTO();
                    apiStateWorkflow.setApiCurrentState(currentStatus);
                    apiStateWorkflow.setApiLCAction(action);
                    apiStateWorkflow.setApiName(apiName);
                    apiStateWorkflow.setApiContext(apiContext);
                    apiStateWorkflow.setApiType(apiType);
                    apiStateWorkflow.setApiVersion(apiVersion);
                    apiStateWorkflow.setApiProvider(providerName);
                    apiStateWorkflow.setCallbackUrl(workflowProperties.getWorkflowCallbackAPI());
                    apiStateWorkflow.setExternalWorkflowReference(apiStateWFExecutor.generateUUID());
                    apiStateWorkflow.setTenantId(tenantId);
                    apiStateWorkflow.setTenantDomain(this.tenantDomain);
                    apiStateWorkflow.setWorkflowType(WorkflowConstants.WF_TYPE_AM_API_STATE);
                    apiStateWorkflow.setStatus(WorkflowStatus.CREATED);
                    apiStateWorkflow.setCreatedTime(System.currentTimeMillis());
                    apiStateWorkflow.setWorkflowReference(Integer.toString(apiId));
                    apiStateWorkflow.setInvoker(this.username);
                    apiStateWorkflow.setApiUUID(uuid);
                    String workflowDescription = "Pending lifecycle state change action: " + action;
                    apiStateWorkflow.setWorkflowDescription(workflowDescription);

                    WorkflowResponse workflowResponse = apiStateWFExecutor.execute(apiStateWorkflow);
                    response.setWorkflowResponse(workflowResponse);
                } catch (WorkflowException e) {
                    handleException("Failed to execute workflow for life cycle status change : " + e.getMessage(), e);
                }

                // get the workflow state once the executor is executed.
                wfDTO = apiMgtDAO.retrieveWorkflowFromInternalReference(Integer.toString(apiId),
                        WorkflowConstants.WF_TYPE_AM_API_STATE);
                if (wfDTO != null) {
                    apiWFState = wfDTO.getStatus();
                    response.setStateChangeStatus(apiWFState.toString());
                } else {
                    response.setStateChangeStatus(WorkflowStatus.APPROVED.toString());
                }
            }

            // only change the lifecycle if approved
            // apiWFState is null when simple wf executor is used because wf state is not stored in the db.
            if (WorkflowStatus.APPROVED.equals(apiWFState) || apiWFState == null) {
                targetStatus = "";
                apiArtifact.invokeAction(action, APIConstants.API_LIFE_CYCLE);
                targetStatus = apiArtifact.getLifecycleState();
                if (!currentStatus.equals(targetStatus)) {
                    apiMgtDAO.recordAPILifeCycleEvent(apiId, currentStatus.toUpperCase(),
                            targetStatus.toUpperCase(), this.username, this.tenantId);
                }
                if (log.isDebugEnabled()) {
                    String logMessage = "API Status changed successfully. API Name: " + apiIdentifier.getApiName()
                            + ", API Version " + apiIdentifier.getVersion() + ", New Status : " + targetStatus;
                    log.debug(logMessage);
                }
                APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(),
                        APIConstants.EventType.API_LIFECYCLE_CHANGE.name(), tenantId, tenantDomain, apiName, apiId,
                        uuid, apiVersion, apiType, apiContext, providerName, targetStatus);
                APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name());
                return response;
            }
        }
    } catch (GovernanceException e) {
        // NOTE(review): e.getCause() may be null for some GovernanceExceptions, which would NPE here — confirm.
        String cause = e.getCause().getMessage();
        if (!StringUtils.isEmpty(cause)) {
            // The executor smuggles failure details through the exception message; dispatch on its prefix.
            if (cause.contains("FaultGatewaysException:")) {
                Map<String, Map<String, String>> faultMap = new HashMap<String, Map<String, String>>();
                String faultJsonString;
                if (!StringUtils.isEmpty(cause) && cause.split("FaultGatewaysException:").length > 1) {
                    faultJsonString = cause.split("FaultGatewaysException:")[1];
                    try {
                        JSONObject faultGatewayJson = (JSONObject) new JSONParser().parse(faultJsonString);
                        faultMap.putAll(faultGatewayJson);
                        throw new FaultGatewaysException(faultMap);
                    } catch (ParseException e1) {
                        log.error("Couldn't parse the Failed Environment json", e);
                        handleException("Couldn't parse the Failed Environment json : " + e.getMessage(), e);
                    }
                }
            } else if (cause.contains("APIManagementException:")) {
                // This exception already logged from APIExecutor class hence this no need to logged again
                handleException(
                        "Failed to change the life cycle status : " + cause.split("APIManagementException:")[1], e);
            } else {
                /* This exception already logged from APIExecutor class hence this no need to logged again
                This block handles the all the exception which not have custom cause message*/
                handleException("Failed to change the life cycle status : " + e.getMessage(), e);
            }
        }
        return response;
    } finally {
        PrivilegedCarbonContext.endTenantFlow();
    }
    return response;
}

// UUID-based lifecycle change variant; mirrors the identifier-based overload above but resolves the API
// through the persistence layer instead of a registry artifact.
@Override
public APIStateChangeResponse changeLifeCycleStatus(String orgId, String uuid, String action,
                                                    Map<String, Boolean> checklist)
        throws APIManagementException, FaultGatewaysException {
    APIStateChangeResponse response = new APIStateChangeResponse();
    try {
        PrivilegedCarbonContext.startTenantFlow();
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username);
        PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(this.tenantDomain, true);
        //GenericArtifact apiArtifact = getAPIArtifact(apiIdentifier);
        API api = getLightweightAPIByUUID(uuid, orgId);
        String targetStatus;
        if (api != null) {

            String providerName = api.getId().getProviderName();
            String apiName = api.getId().getApiName();
            String apiContext = api.getContext();
            String apiType = api.getType();//check
            String apiVersion = api.getId().getVersion();
            String currentStatus = api.getStatus();
            int apiId = apiMgtDAO.getAPIID(api.getUuid());

            WorkflowStatus apiWFState = null;
            WorkflowDTO wfDTO = apiMgtDAO.retrieveWorkflowFromInternalReference(Integer.toString(apiId),
                    WorkflowConstants.WF_TYPE_AM_API_STATE);
            if (wfDTO != null) {
                apiWFState = wfDTO.getStatus();
            }

            // if the workflow has started, then executor should not fire again
            if (!WorkflowStatus.CREATED.equals(apiWFState)) {

                try {
                    WorkflowProperties workflowProperties = getAPIManagerConfiguration().getWorkflowProperties();
                    WorkflowExecutor apiStateWFExecutor = WorkflowExecutorFactory.getInstance()
                            .getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_API_STATE);
                    APIStateWorkflowDTO apiStateWorkflow = new APIStateWorkflowDTO();
                    apiStateWorkflow.setApiCurrentState(currentStatus);
                    apiStateWorkflow.setApiLCAction(action);
                    apiStateWorkflow.setApiName(apiName);
                    apiStateWorkflow.setApiContext(apiContext);
                    apiStateWorkflow.setApiType(apiType);
                    apiStateWorkflow.setApiVersion(apiVersion);
                    apiStateWorkflow.setApiProvider(providerName);
                    apiStateWorkflow.setCallbackUrl(workflowProperties.getWorkflowCallbackAPI());
                    apiStateWorkflow.setExternalWorkflowReference(apiStateWFExecutor.generateUUID());
                    apiStateWorkflow.setTenantId(tenantId);
                    apiStateWorkflow.setTenantDomain(this.tenantDomain);
                    apiStateWorkflow.setWorkflowType(WorkflowConstants.WF_TYPE_AM_API_STATE);
                    apiStateWorkflow.setStatus(WorkflowStatus.CREATED);
                    apiStateWorkflow.setCreatedTime(System.currentTimeMillis());
                    apiStateWorkflow.setWorkflowReference(Integer.toString(apiId));
                    apiStateWorkflow.setInvoker(this.username);
                    apiStateWorkflow.setApiUUID(uuid);
                    String workflowDescription = "Pending lifecycle state change action: " + action;
                    apiStateWorkflow.setWorkflowDescription(workflowDescription);

                    WorkflowResponse workflowResponse = apiStateWFExecutor.execute(apiStateWorkflow);
                    response.setWorkflowResponse(workflowResponse);
                } catch (WorkflowException e) {
                    handleException("Failed to execute workflow for life cycle status change : " + e.getMessage(), e);
                }

                // get the workflow state once the executor is executed.
wfDTO = apiMgtDAO.retrieveWorkflowFromInternalReference(Integer.toString(apiId), WorkflowConstants.WF_TYPE_AM_API_STATE); if (wfDTO != null) { apiWFState = wfDTO.getStatus(); response.setStateChangeStatus(apiWFState.toString()); } else { response.setStateChangeStatus(WorkflowStatus.APPROVED.toString()); } } // only change the lifecycle if approved // apiWFState is null when simple wf executor is used because wf state is not stored in the db. if (WorkflowStatus.APPROVED.equals(apiWFState) || apiWFState == null) { targetStatus = ""; //RegistryLCManager.getInstance().getStateForTransition(action); //apiArtifact.invokeAction(action, APIConstants.API_LIFE_CYCLE); //targetStatus = apiArtifact.getLifecycleState(); targetStatus = LCManagerFactory.getInstance().getLCManager().getStateForTransition(action); apiPersistenceInstance.changeAPILifeCycle(new Organization(orgId), uuid, targetStatus); api.setOrganization(orgId); changeLifeCycle(api, currentStatus, targetStatus, checklist); //Sending Notifications to existing subscribers if (APIConstants.PUBLISHED.equals(targetStatus)) { sendEmailNotification(api); } // if retired Delete Existing Gateway Deployments. if (APIConstants.RETIRED.equals(targetStatus)){ deleteAPIRevisions(uuid, orgId); } if (!currentStatus.equalsIgnoreCase(targetStatus)) { apiMgtDAO.recordAPILifeCycleEvent(apiId, currentStatus.toUpperCase(), targetStatus.toUpperCase(), this.username, this.tenantId); } if (log.isDebugEnabled()) { String logMessage = "API Status changed successfully. 
API Name: " + api.getId().getApiName() + ", API Version " + api.getId().getVersion() + ", New Status : " + targetStatus; log.debug(logMessage); } APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.API_LIFECYCLE_CHANGE.name(), tenantId, tenantDomain, apiName, apiId, uuid,apiVersion, apiType, apiContext, APIUtil.replaceEmailDomainBack(providerName), targetStatus); APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name()); // Extracting API details for the recommendation system if (recommendationEnvironment != null) { RecommenderEventPublisher extractor = new RecommenderDetailsExtractor(api, tenantDomain, APIConstants.ADD_API); Thread recommendationThread = new Thread(extractor); recommendationThread.start(); } return response; } } } catch (APIPersistenceException e) { handleException("Error while accessing persistance layer", e); } catch (PersistenceException e) { handleException("Error while accessing lifecycle information ", e); } finally { PrivilegedCarbonContext.endTenantFlow(); } return response; } private void changeLifeCycle(API api, String currentState, String targetState, Map<String, Boolean> checklist) throws APIManagementException, FaultGatewaysException { String oldStatus = currentState.toUpperCase(); String newStatus = (targetState != null) ? 
targetState.toUpperCase() : targetState; boolean isCurrentCreatedOrPrototyped = APIConstants.CREATED.equals(oldStatus) || APIConstants.PROTOTYPED.equals(oldStatus); boolean isStateTransitionToPublished = isCurrentCreatedOrPrototyped && APIConstants.PUBLISHED.equals(newStatus); if (newStatus != null) { // only allow the executor to be used with default LC states transition // check only the newStatus so this executor can be used for LC state change from // custom state to default api state if (isStateTransitionToPublished) { Set<Tier> tiers = api.getAvailableTiers(); String endPoint = api.getEndpointConfig(); String apiSecurity = api.getApiSecurity(); boolean isOauthProtected = apiSecurity == null || apiSecurity.contains(APIConstants.DEFAULT_API_SECURITY_OAUTH2); if (APIConstants.API_TYPE_WEBSUB.equals(api.getType()) || endPoint != null && endPoint.trim().length() > 0) { if (isOauthProtected && (tiers == null || tiers.size() <= 0)) { throw new APIManagementException("Failed to publish service to API store while executing " + "APIExecutor. No Tiers selected"); } } else { throw new APIManagementException("Failed to publish service to API store while executing" + " APIExecutor. 
No endpoint selected"); } } // push the state change to gateway Map<String, String> failedGateways = propergateAPIStatusChangeToGateways(newStatus, api); if (APIConstants.PUBLISHED.equals(newStatus) || !oldStatus.equals(newStatus)) { //TODO has registry access //if the API is websocket and if default version is selected, update the other versions if (APIConstants.APITransportType.WS.toString().equals(api.getType()) && api.isDefaultVersion()) { Set<String> versions = getAPIVersions(api.getId().getProviderName(), api.getId().getName(), api.getOrganization()); for (String version : versions) { if (version.equals(api.getId().getVersion())) { continue; } String uuid = APIUtil.getUUIDFromIdentifier( new APIIdentifier(api.getId().getProviderName(), api.getId().getName(), version), api.getOrganization()); API otherApi = getLightweightAPIByUUID(uuid, api.getOrganization()); APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.API_UPDATE.name(), tenantId, tenantDomain, otherApi.getId().getApiName(), otherApi.getId().getId(), otherApi.getUuid(), version, api.getType(), otherApi.getContext(), otherApi.getId().getProviderName(), otherApi.getStatus()); APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name()); } } } if (log.isDebugEnabled()) { String logMessage = "Publish changed status to the Gateway. API Name: " + api.getId().getApiName() + ", API Version " + api.getId().getVersion() + ", API Context: " + api.getContext() + ", New Status : " + newStatus; log.debug(logMessage); } // update api related information for state change updateAPIforStateChange(api, currentState, newStatus, failedGateways); if (log.isDebugEnabled()) { String logMessage = "API related information successfully updated. 
API Name: " + api.getId().getApiName() + ", API Version " + api.getId().getVersion() + ", API Context: " + api.getContext() + ", New Status : " + newStatus; log.debug(logMessage); } } else { throw new APIManagementException("Invalid Lifecycle status for default APIExecutor :" + targetState); } boolean deprecateOldVersions = false; boolean makeKeysForwardCompatible = true; // If the API status is CREATED/PROTOTYPED ,check for check list items of lifecycle if (isCurrentCreatedOrPrototyped) { if (checklist != null) { if(checklist.containsKey(APIConstants.DEPRECATE_CHECK_LIST_ITEM)) { deprecateOldVersions = checklist.get(APIConstants.DEPRECATE_CHECK_LIST_ITEM); } if(checklist.containsKey(APIConstants.RESUBSCRIBE_CHECK_LIST_ITEM)) { makeKeysForwardCompatible = !checklist.get(APIConstants.RESUBSCRIBE_CHECK_LIST_ITEM); } } } if (isStateTransitionToPublished) { if (makeKeysForwardCompatible) { makeAPIKeysForwardCompatible(api); } if (deprecateOldVersions) { String provider = APIUtil.replaceEmailDomain(api.getId().getProviderName()); String apiName = api.getId().getName(); List<API> apiList = getAPIVersionsByProviderAndName(provider, apiName, api.getOrganization()); APIVersionComparator versionComparator = new APIVersionComparator(); for (API oldAPI : apiList) { if (oldAPI.getId().getApiName().equals(api.getId().getApiName()) && versionComparator.compare(oldAPI, api) < 0 && (APIConstants.PUBLISHED.equals(oldAPI.getStatus()))) { changeLifeCycleStatus(tenantDomain, oldAPI.getUuid(), APIConstants.API_LC_ACTION_DEPRECATE, null); } } } } } private List<API> getAPIVersionsByProviderAndName(String provider, String apiName, String organization) throws APIManagementException { Set<String> list = apiMgtDAO.getUUIDsOfAPIVersions(apiName, provider); List<API> apiVersions = new ArrayList<API>(); for (String uuid : list) { try { PublisherAPI publisherAPI = apiPersistenceInstance .getPublisherAPI(new Organization(organization), uuid); if 
(APIConstants.API_PRODUCT.equals(publisherAPI.getType())) { // skip api products continue; } API api = new API(new APIIdentifier(publisherAPI.getProviderName(), publisherAPI.getApiName(), publisherAPI.getVersion())); api.setUuid(uuid); api.setStatus(publisherAPI.getStatus()); apiVersions.add(api); } catch (APIPersistenceException e) { throw new APIManagementException("Error while retrieving the api ", e); } } return apiVersions; } /** * To get the API artifact from the registry * * @param apiIdentifier API den * @return API artifact, if the relevant artifact exists * @throws APIManagementException API Management Exception. */ protected GenericArtifact getAPIArtifact(APIIdentifier apiIdentifier) throws APIManagementException { return APIUtil.getAPIArtifact(apiIdentifier, registry); } @Override public boolean changeAPILCCheckListItems(APIIdentifier apiIdentifier, int checkItem, boolean checkItemValue) throws APIManagementException { String providerTenantMode = apiIdentifier.getProviderName(); boolean success = false; boolean isTenantFlowStarted = false; try { String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerTenantMode)); if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { isTenantFlowStarted = true; PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } GenericArtifact apiArtifact = getAPIArtifact(apiIdentifier); String status = null; try { if (apiArtifact != null) { if (checkItemValue && !apiArtifact.isLCItemChecked(checkItem, APIConstants.API_LIFE_CYCLE)) { apiArtifact.checkLCItem(checkItem, APIConstants.API_LIFE_CYCLE); } else if (!checkItemValue && apiArtifact.isLCItemChecked(checkItem, APIConstants.API_LIFE_CYCLE)) { apiArtifact.uncheckLCItem(checkItem, APIConstants.API_LIFE_CYCLE); } success = true; } } catch (GovernanceException e) { handleException("Error while setting registry lifecycle 
checklist items for the API: " + apiIdentifier.getApiName(), e); } } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } return success; } /** * This method is to set a lifecycle check list item given the APIIdentifier and the checklist item name. * If the given item not in the allowed lifecycle check items list or item is already checked, this will stay * silent and return false. Otherwise, the checklist item will be updated and returns true. * * @param apiIdentifier APIIdentifier * @param checkItemName Name of the checklist item * @param checkItemValue Value to be set to the checklist item * @return boolean value representing success not not * @throws APIManagementException */ @Override public boolean checkAndChangeAPILCCheckListItem(APIIdentifier apiIdentifier, String checkItemName, boolean checkItemValue) throws APIManagementException { Map<String, Object> lifeCycleData = getAPILifeCycleData(apiIdentifier); if (lifeCycleData != null && lifeCycleData.get(APIConstants.LC_CHECK_ITEMS) != null && lifeCycleData .get(APIConstants.LC_CHECK_ITEMS) instanceof ArrayList) { List checkListItems = (List) lifeCycleData.get(APIConstants.LC_CHECK_ITEMS); for (Object item : checkListItems) { if (item instanceof CheckListItem) { CheckListItem checkListItem = (CheckListItem) item; int index = Integer.parseInt(checkListItem.getOrder()); if (checkListItem.getName().equals(checkItemName)) { changeAPILCCheckListItems(apiIdentifier, index, checkItemValue); return true; } } } } return false; } @Override /* * This method returns the lifecycle data for an API including current state,next states. 
* * @param apiId APIIdentifier * @return Map<String,Object> a map with lifecycle data */ public Map<String, Object> getAPILifeCycleData(APIIdentifier apiId) throws APIManagementException { String path = APIUtil.getAPIPath(apiId); Map<String, Object> lcData = new HashMap<String, Object>(); String providerTenantMode = apiId.getProviderName(); boolean isTenantFlowStarted = false; try { String tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(providerTenantMode)); if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { isTenantFlowStarted = true; PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } Resource apiSourceArtifact = registry.get(path); GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); if (artifactManager == null) { String errorMessage = "Failed to retrieve artifact manager when getting lifecycle data for API " + apiId; log.error(errorMessage); throw new APIManagementException(errorMessage); } GenericArtifact artifact = artifactManager.getGenericArtifact( apiSourceArtifact.getUUID()); //Get all the actions corresponding to current state of the api artifact String[] actions = artifact.getAllLifecycleActions(APIConstants.API_LIFE_CYCLE); //Put next states into map lcData.put(APIConstants.LC_NEXT_STATES, actions); String lifeCycleState = artifact.getLifecycleState(); lcData.put(APIConstants.LC_STATUS, lifeCycleState); LifecycleBean bean; bean = LifecycleBeanPopulator.getLifecycleBean(path, (UserRegistry) registry, configRegistry); if (bean != null) { ArrayList<CheckListItem> checkListItems = new ArrayList<CheckListItem>(); ArrayList<String> permissionList = new ArrayList<String>(); //Get lc properties Property[] lifecycleProps = bean.getLifecycleProperties(); //Get roles of the current session holder String[] roleNames = bean.getRolesOfUser(); for (Property 
property : lifecycleProps) { String propName = property.getKey(); String[] propValues = property.getValues(); //Check for permission properties if any exists if (propValues != null && propValues.length != 0) { if (propName.startsWith(APIConstants.LC_PROPERTY_CHECKLIST_PREFIX) && propName.endsWith(APIConstants.LC_PROPERTY_PERMISSION_SUFFIX) && propName.contains(APIConstants.API_LIFE_CYCLE)) { for (String role : roleNames) { for (String propValue : propValues) { String key = propName.replace(APIConstants.LC_PROPERTY_CHECKLIST_PREFIX, "") .replace(APIConstants.LC_PROPERTY_PERMISSION_SUFFIX, ""); if (propValue.equals(role)) { permissionList.add(key); } else if (propValue.startsWith(APIConstants.LC_PROPERTY_CHECKLIST_PREFIX) && propValue.endsWith(APIConstants.LC_PROPERTY_PERMISSION_SUFFIX)) { permissionList.add(key); } } } } } } //Check for lifecycle checklist item properties defined for (Property property : lifecycleProps) { String propName = property.getKey(); String[] propValues = property.getValues(); if (propValues != null && propValues.length != 0) { CheckListItem checkListItem = new CheckListItem(); checkListItem.setVisible("false"); if (propName.startsWith(APIConstants.LC_PROPERTY_CHECKLIST_PREFIX) && propName.endsWith(APIConstants.LC_PROPERTY_ITEM_SUFFIX) && propName.contains(APIConstants.API_LIFE_CYCLE)) { if (propValues.length > 2) { for (String param : propValues) { if (param.startsWith(APIConstants.LC_STATUS)) { checkListItem.setLifeCycleStatus(param.substring(7)); } else if (param.startsWith(APIConstants.LC_CHECK_ITEM_NAME)) { checkListItem.setName(param.substring(5)); } else if (param.startsWith(APIConstants.LC_CHECK_ITEM_VALUE)) { checkListItem.setValue(param.substring(6)); } else if (param.startsWith(APIConstants.LC_CHECK_ITEM_ORDER)) { checkListItem.setOrder(param.substring(6)); } } } String key = propName.replace(APIConstants.LC_PROPERTY_CHECKLIST_PREFIX, ""). 
replace(APIConstants.LC_PROPERTY_ITEM_SUFFIX, ""); if (permissionList.contains(key)) { //Set visible to true if the checklist item permits checkListItem.setVisible("true"); } } if (checkListItem.matchLifeCycleStatus(lifeCycleState)) { checkListItems.add(checkListItem); } } } lcData.put("items", checkListItems); } } catch (Exception e) { handleException(e.getMessage(), e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } return lcData; } public Map<String, Object> getAPILifeCycleData(String uuid, String orgId) throws APIManagementException { Map<String, Object> lcData = new HashMap<String, Object>(); API api = getLightweightAPIByUUID(uuid, orgId); List<String> actionsList; try { actionsList = LCManagerFactory.getInstance().getLCManager().getAllowedActionsForState(api.getStatus()); if (actionsList != null) { String[] actionsArray = new String[actionsList.size()]; actionsArray = actionsList.toArray(actionsArray); lcData.put(APIConstants.LC_NEXT_STATES, actionsArray); } ArrayList<CheckListItem> checkListItems = new ArrayList<CheckListItem>(); List<String> checklistItemsList = LCManagerFactory.getInstance().getLCManager() .getCheckListItemsForState(api.getStatus()); if (checklistItemsList != null) { for (String name : checklistItemsList) { CheckListItem item = new CheckListItem(); item.setName(name); item.setValue("false"); checkListItems.add(item); } } lcData.put("items", checkListItems); } catch (PersistenceException e) { throw new APIManagementException("Error while parsing the lifecycle ", e); } String status = api.getStatus(); status = status.substring(0, 1).toUpperCase() + status.substring(1).toLowerCase(); // First letter capital lcData.put(APIConstants.LC_STATUS, status); return lcData; } @Override public String getAPILifeCycleStatus(APIIdentifier apiIdentifier) throws APIManagementException { try { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username); 
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(this.tenantDomain, true); GenericArtifact apiArtifact = APIUtil.getAPIArtifact(apiIdentifier, registry); if (apiArtifact == null) { String errorMessage = "API artifact is null when retrieving lifecycle status of API " + apiIdentifier.getApiName(); log.error(errorMessage); throw new APIManagementException(errorMessage); } return apiArtifact.getLifecycleState(); } catch (GovernanceException e) { handleException("Failed to get the life cycle status : " + e.getMessage(), e); return null; } finally { PrivilegedCarbonContext.endTenantFlow(); } } @Override public Map<String, Object> getAllPaginatedAPIs(String tenantDomain, int start, int end) throws APIManagementException { Map<String, Object> result = new HashMap<String, Object>(); List<API> apiSortedList = new ArrayList<API>(); int totalLength = 0; boolean isTenantFlowStarted = false; try { String paginationLimit = getAPIManagerConfiguration() .getFirstProperty(APIConstants.API_PUBLISHER_APIS_PER_PAGE); // If the Config exists use it to set the pagination limit final int maxPaginationLimit; if (paginationLimit != null) { // The additional 1 added to the maxPaginationLimit is to help us determine if more // APIs may exist so that we know that we are unable to determine the actual total // API count. 
We will subtract this 1 later on so that it does not interfere with // the logic of the rest of the application int pagination = Integer.parseInt(paginationLimit); // Because the store jaggery pagination logic is 10 results per a page we need to set pagination // limit to at least 11 or the pagination done at this level will conflict with the store pagination // leading to some of the APIs not being displayed if (pagination < 11) { pagination = 11; log.warn( "Value of '" + APIConstants.API_PUBLISHER_APIS_PER_PAGE + "' is too low, defaulting to 11"); } maxPaginationLimit = start + pagination + 1; } // Else if the config is not specifed we go with default functionality and load all else { maxPaginationLimit = Integer.MAX_VALUE; } Registry userRegistry; boolean isTenantMode = (tenantDomain != null); if ((isTenantMode && this.tenantDomain == null) || (isTenantMode && isTenantDomainNotMatching(tenantDomain))) { if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); isTenantFlowStarted = true; } int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager() .getTenantId(tenantDomain); APIUtil.loadTenantRegistry(tenantId); userRegistry = ServiceReferenceHolder.getInstance(). 
getRegistryService().getGovernanceUserRegistry(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME, tenantId); PrivilegedCarbonContext.getThreadLocalCarbonContext() .setUsername(CarbonConstants.REGISTRY_ANONNYMOUS_USERNAME); } else { userRegistry = registry; PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username); } PaginationContext.init(start, end, "ASC", APIConstants.PROVIDER_OVERVIEW_NAME, maxPaginationLimit); GenericArtifactManager artifactManager = APIUtil.getArtifactManager(userRegistry, APIConstants.API_KEY); if (artifactManager != null) { List<GovernanceArtifact> genericArtifacts = null; if (isAccessControlRestrictionEnabled && !APIUtil.hasPermission(userNameWithoutChange, APIConstants .Permissions.APIM_ADMIN)) { genericArtifacts = GovernanceUtils.findGovernanceArtifacts(getUserRoleListQuery(), userRegistry, APIConstants.API_RXT_MEDIA_TYPE, true); } else { genericArtifacts = GovernanceUtils .findGovernanceArtifacts(new HashMap<String, List<String>>(), userRegistry, APIConstants.API_RXT_MEDIA_TYPE); } totalLength = PaginationContext.getInstance().getLength(); if (genericArtifacts == null || genericArtifacts.isEmpty()) { result.put("apis", apiSortedList); result.put("totalLength", totalLength); return result; } // Check to see if we can speculate that there are more APIs to be loaded if (maxPaginationLimit == totalLength) { // performance hit --totalLength; // Remove the additional 1 we added earlier when setting max pagination limit } int tempLength = 0; for (GovernanceArtifact artifact : genericArtifacts) { API api = APIUtil.getAPI(artifact); if (api != null) { apiSortedList.add(api); } tempLength++; if (tempLength >= totalLength) { break; } } Collections.sort(apiSortedList, new APINameComparator()); } else { String errorMessage = "Failed to retrieve artifact manager when getting paginated APIs of tenant " + tenantDomain; log.error(errorMessage); throw new APIManagementException(errorMessage); } } catch (RegistryException e) { 
handleException("Failed to get all APIs", e); } catch (UserStoreException e) { handleException("Failed to get all APIs", e); } finally { PaginationContext.destroy(); if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } result.put("apis", apiSortedList); result.put("totalLength", totalLength); return result; } private boolean isTenantDomainNotMatching(String tenantDomain) { if (this.tenantDomain != null) { return !(this.tenantDomain.equals(tenantDomain)); } return true; } /** * Deploy policy to global CEP and persist the policy object * * @param policy policy object */ public void addPolicy(Policy policy) throws APIManagementException { if (policy instanceof APIPolicy) { APIPolicy apiPolicy = (APIPolicy) policy; //Check if there's a policy exists before adding the new policy Policy existingPolicy = getAPIPolicy(userNameWithoutChange, apiPolicy.getPolicyName()); if (existingPolicy != null) { handleException("Advanced Policy with name " + apiPolicy.getPolicyName() + " already exists"); } apiPolicy.setUserLevel(PolicyConstants.ACROSS_ALL); apiPolicy = apiMgtDAO.addAPIPolicy(apiPolicy); List<Integer> addedConditionGroupIds = new ArrayList<>(); for (Pipeline pipeline : apiPolicy.getPipelines()) { addedConditionGroupIds.add(pipeline.getId()); } APIPolicyEvent apiPolicyEvent = new APIPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_CREATE.name(), tenantId, apiPolicy.getTenantDomain(), apiPolicy.getPolicyId(), apiPolicy.getPolicyName(), apiPolicy.getDefaultQuotaPolicy().getType(), addedConditionGroupIds, null); APIUtil.sendNotification(apiPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else if (policy instanceof ApplicationPolicy) { ApplicationPolicy appPolicy = (ApplicationPolicy) policy; //Check if there's a policy exists before adding the new policy Policy existingPolicy = getApplicationPolicy(userNameWithoutChange, appPolicy.getPolicyName()); if (existingPolicy != null) { 
handleException("Application Policy with name " + appPolicy.getPolicyName() + " already exists"); } apiMgtDAO.addApplicationPolicy(appPolicy); //policy id is not set. retrieving policy to get the id. ApplicationPolicy retrievedPolicy = apiMgtDAO.getApplicationPolicy(appPolicy.getPolicyName(), tenantId); ApplicationPolicyEvent applicationPolicyEvent = new ApplicationPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_CREATE.name(), tenantId, appPolicy.getTenantDomain(), retrievedPolicy.getPolicyId(), appPolicy.getPolicyName(), appPolicy.getDefaultQuotaPolicy().getType()); APIUtil.sendNotification(applicationPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else if (policy instanceof SubscriptionPolicy) { SubscriptionPolicy subPolicy = (SubscriptionPolicy) policy; //Check if there's a policy exists before adding the new policy Policy existingPolicy = getSubscriptionPolicy(userNameWithoutChange, subPolicy.getPolicyName()); if (existingPolicy != null) { handleException("Subscription Policy with name " + subPolicy.getPolicyName() + " already exists"); } apiMgtDAO.addSubscriptionPolicy(subPolicy); String monetizationPlan = subPolicy.getMonetizationPlan(); Map<String, String> monetizationPlanProperties = subPolicy.getMonetizationPlanProperties(); if (StringUtils.isNotBlank(monetizationPlan) && MapUtils.isNotEmpty(monetizationPlanProperties)) { createMonetizationPlan(subPolicy); } //policy id is not set. retrieving policy to get the id. 
SubscriptionPolicy retrievedPolicy = apiMgtDAO.getSubscriptionPolicy(subPolicy.getPolicyName(), tenantId); SubscriptionPolicyEvent subscriptionPolicyEvent = new SubscriptionPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_CREATE.name(), tenantId, subPolicy.getTenantDomain(), retrievedPolicy.getPolicyId(), subPolicy.getPolicyName(), subPolicy.getDefaultQuotaPolicy().getType(), subPolicy.getRateLimitCount(),subPolicy.getRateLimitTimeUnit(), subPolicy.isStopOnQuotaReach(), subPolicy.getGraphQLMaxDepth(),subPolicy.getGraphQLMaxComplexity(),subPolicy.getSubscriberCount()); APIUtil.sendNotification(subscriptionPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else if (policy instanceof GlobalPolicy) { GlobalPolicy globalPolicy = (GlobalPolicy) policy; // checking if policy already exist Policy existingPolicy = getGlobalPolicy(globalPolicy.getPolicyName()); if (existingPolicy != null) { throw new APIManagementException("Policy name already exists"); } apiMgtDAO.addGlobalPolicy(globalPolicy); publishKeyTemplateEvent(globalPolicy.getKeyTemplate(), "add"); GlobalPolicy retrievedPolicy = apiMgtDAO.getGlobalPolicy(globalPolicy.getPolicyName()); GlobalPolicyEvent globalPolicyEvent = new GlobalPolicyEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.POLICY_CREATE.name(), tenantId, globalPolicy.getTenantDomain(), retrievedPolicy.getPolicyId(), globalPolicy.getPolicyName()); APIUtil.sendNotification(globalPolicyEvent, APIConstants.NotifierType.POLICY.name()); } else { String msg = "Policy type " + policy.getClass().getName() + " is not supported"; log.error(msg); throw new UnsupportedPolicyTypeException(msg); } } @Override public void configureMonetizationInAPIArtifact(API api) throws APIManagementException { Organization org = new Organization(api.getOrganization()); try { apiPersistenceInstance.updateAPI(org, APIMapper.INSTANCE.toPublisherApi(api)); } catch (APIPersistenceException e) { 
throw new APIManagementException("Error while updating API details", e); } } @Override public void configureMonetizationInAPIProductArtifact(APIProduct apiProduct) throws APIManagementException { boolean transactionCommitted = false; try { registry.beginTransaction(); String apiArtifactId = registry.get(APIUtil.getAPIProductPath(apiProduct.getId())).getId(); GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); if (artifactManager == null) { handleException("Artifact manager is null when updating monetization data for API ID " + apiProduct.getId()); } GenericArtifact artifact = artifactManager.getGenericArtifact(apiProduct.getUuid()); //set monetization status (i.e - enabled or disabled) artifact.setAttribute(APIConstants.Monetization.API_MONETIZATION_STATUS, Boolean.toString(apiProduct.getMonetizationStatus())); //clear existing monetization properties artifact.removeAttribute(APIConstants.Monetization.API_MONETIZATION_PROPERTIES); //set new additional monetization data if (apiProduct.getMonetizationProperties() != null) { artifact.setAttribute(APIConstants.Monetization.API_MONETIZATION_PROPERTIES, apiProduct.getMonetizationProperties().toJSONString()); } artifactManager.updateGenericArtifact(artifact); registry.commitTransaction(); transactionCommitted = true; } catch (Exception e) { try { registry.rollbackTransaction(); } catch (RegistryException re) { handleException("Error while rolling back the transaction (monetization status update) for API product : " + apiProduct.getId().getName(), re); } handleException("Error while performing registry transaction (monetization status update) operation", e); } finally { try { if (!transactionCommitted) { registry.rollbackTransaction(); } } catch (RegistryException e) { handleException("Error occurred while rolling back the transaction (monetization status update).", e); } } } /** * This methods creates a monetization plan for a given subscription policy * * @param subPolicy 
subscription policy * @return true if successful, false otherwise * @throws APIManagementException if failed to create a monetization plan */ private boolean createMonetizationPlan(SubscriptionPolicy subPolicy) throws APIManagementException { Monetization monetizationImplementation = getMonetizationImplClass(); if (monetizationImplementation != null) { try { return monetizationImplementation.createBillingPlan(subPolicy); } catch (MonetizationException e) { APIUtil.handleException("Failed to create monetization plan for : " + subPolicy.getPolicyName(), e); } } return false; } /** * This methods updates the monetization plan for a given subscription policy * * @param subPolicy subscription policy * @return true if successful, false otherwise * @throws APIManagementException if failed to update the plan */ private boolean updateMonetizationPlan(SubscriptionPolicy subPolicy) throws APIManagementException { Monetization monetizationImplementation = getMonetizationImplClass(); if (monetizationImplementation != null) { try { return monetizationImplementation.updateBillingPlan(subPolicy); } catch (MonetizationException e) { APIUtil.handleException("Failed to update monetization plan for : " + subPolicy.getPolicyName(), e); } } return false; } /** * This methods delete the monetization plan for a given subscription policy * * @param subPolicy subscription policy * @return true if successful, false otherwise * @throws APIManagementException if failed to delete the plan */ private boolean deleteMonetizationPlan(SubscriptionPolicy subPolicy) throws APIManagementException { Monetization monetizationImplementation = getMonetizationImplClass(); if (monetizationImplementation != null) { try { return monetizationImplementation.deleteBillingPlan(subPolicy); } catch (MonetizationException e) { APIUtil.handleException("Failed to delete monetization plan of : " + subPolicy.getPolicyName(), e); } } return false; } /** * This methods loads the monetization implementation class * * 
@return monetization implementation class
 * @throws APIManagementException if failed to load monetization implementation class
 */
public Monetization getMonetizationImplClass() throws APIManagementException {
    APIManagerConfiguration configuration = org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder.
            getInstance().getAPIManagerConfigurationService().getAPIManagerConfiguration();
    Monetization monetizationImpl = null;
    if (configuration == null) {
        // Returns null in this case; callers must tolerate a missing implementation.
        log.error("API Manager configuration is not initialized.");
    } else {
        String monetizationImplClass = configuration.getFirstProperty(APIConstants.Monetization.MONETIZATION_IMPL);
        if (monetizationImplClass == null) {
            // No custom implementation configured — fall back to the default one.
            monetizationImpl = new DefaultMonetizationImpl();
        } else {
            try {
                monetizationImpl = (Monetization) APIUtil.getClassInstance(monetizationImplClass);
            } catch (ClassNotFoundException | IllegalAccessException | InstantiationException e) {
                APIUtil.handleException("Failed to load monetization implementation class.", e);
            }
        }
    }
    return monetizationImpl;
}

/**
 * Updates a throttle policy of any supported level (API / application / subscription / global),
 * invalidates caches where needed and notifies other components via policy events.
 */
public void updatePolicy(Policy policy) throws APIManagementException {
    String oldKeyTemplate = null;
    String newKeyTemplate = null;
    if (policy instanceof APIPolicy) {
        APIPolicy apiPolicy = (APIPolicy) policy;
        apiPolicy.setUserLevel(PolicyConstants.ACROSS_ALL);
        //TODO this has done due to update policy method not deleting the second level entries when delete on cascade
        //TODO Need to fix appropriately
        List<Pipeline> pipelineList = apiPolicy.getPipelines();
        if (pipelineList != null && pipelineList.size() != 0) {
            Iterator<Pipeline> pipelineIterator = pipelineList.iterator();
            while (pipelineIterator.hasNext()) {
                Pipeline pipeline = pipelineIterator.next();
                if (!pipeline.isEnabled()) {
                    // Drop disabled pipelines entirely before persisting.
                    pipelineIterator.remove();
                } else {
                    if (pipeline.getConditions() != null && pipeline.getConditions().size() != 0) {
                        Iterator<Condition> conditionIterator = pipeline.getConditions().iterator();
                        while (conditionIterator.hasNext()) {
                            Condition condition = conditionIterator.next();
                            if
(JavaUtils.isFalseExplicitly(condition.getConditionEnabled())) {
                                // Remove conditions explicitly marked as disabled.
                                conditionIterator.remove();
                            }
                        }
                    } else {
                        // Enabled pipeline without any conditions is meaningless — drop it too.
                        pipelineIterator.remove();
                    }
                }
            }
        }
        APIPolicy existingPolicy = apiMgtDAO.getAPIPolicy(policy.getPolicyName(), policy.getTenantId());
        apiPolicy = apiMgtDAO.updateAPIPolicy(apiPolicy);
        //TODO rename level to resource or appropriate name
        APIManagerConfiguration config = getAPIManagerConfiguration();
        if (log.isDebugEnabled()) {
            log.debug("Calling invalidation cache for API Policy for tenant ");
        }
        String policyContext = APIConstants.POLICY_CACHE_CONTEXT + "/t/" + apiPolicy.getTenantDomain() + "/";
        invalidateResourceCache(policyContext, null, Collections.EMPTY_SET);
        // Diff the condition groups: all old pipelines are reported as deleted and all
        // current pipelines as added (DAO update replaces pipelines wholesale).
        List<Integer> addedConditionGroupIds = new ArrayList<>();
        List<Integer> deletedConditionGroupIds = new ArrayList<>();
        for (Pipeline pipeline : existingPolicy.getPipelines()) {
            deletedConditionGroupIds.add(pipeline.getId());
        }
        for (Pipeline pipeline : apiPolicy.getPipelines()) {
            addedConditionGroupIds.add(pipeline.getId());
        }
        APIPolicyEvent apiPolicyEvent = new APIPolicyEvent(UUID.randomUUID().toString(),
                System.currentTimeMillis(), APIConstants.EventType.POLICY_UPDATE.name(), tenantId,
                apiPolicy.getTenantDomain(), apiPolicy.getPolicyId(), apiPolicy.getPolicyName(),
                apiPolicy.getDefaultQuotaPolicy().getType(), addedConditionGroupIds, deletedConditionGroupIds);
        APIUtil.sendNotification(apiPolicyEvent, APIConstants.NotifierType.POLICY.name());
    } else if (policy instanceof ApplicationPolicy) {
        ApplicationPolicy appPolicy = (ApplicationPolicy) policy;
        apiMgtDAO.updateApplicationPolicy(appPolicy);
        //policy id is not set. retrieving policy to get the id.
ApplicationPolicy retrievedPolicy = apiMgtDAO.getApplicationPolicy(appPolicy.getPolicyName(), tenantId);
        ApplicationPolicyEvent applicationPolicyEvent = new ApplicationPolicyEvent(UUID.randomUUID().toString(),
                System.currentTimeMillis(), APIConstants.EventType.POLICY_UPDATE.name(), tenantId,
                appPolicy.getTenantDomain(), retrievedPolicy.getPolicyId(), appPolicy.getPolicyName(),
                appPolicy.getDefaultQuotaPolicy().getType());
        APIUtil.sendNotification(applicationPolicyEvent, APIConstants.NotifierType.POLICY.name());
    } else if (policy instanceof SubscriptionPolicy) {
        SubscriptionPolicy subPolicy = (SubscriptionPolicy) policy;
        apiMgtDAO.updateSubscriptionPolicy(subPolicy);
        String monetizationPlan = subPolicy.getMonetizationPlan();
        Map<String, String> monetizationPlanProperties = subPolicy.getMonetizationPlanProperties();
        //call the monetization extension point to create plans (if any)
        if (StringUtils.isNotBlank(monetizationPlan) && MapUtils.isNotEmpty(monetizationPlanProperties)) {
            updateMonetizationPlan(subPolicy);
        }
        //policy id is not set. retrieving policy to get the id.
SubscriptionPolicy retrievedPolicy = apiMgtDAO.getSubscriptionPolicy(subPolicy.getPolicyName(), tenantId);
        SubscriptionPolicyEvent subscriptionPolicyEvent = new SubscriptionPolicyEvent(UUID.randomUUID().toString(),
                System.currentTimeMillis(), APIConstants.EventType.POLICY_UPDATE.name(),
                tenantId,subPolicy.getTenantDomain(), retrievedPolicy.getPolicyId(),
                subPolicy.getPolicyName(), subPolicy.getDefaultQuotaPolicy().getType(),
                subPolicy.getRateLimitCount(),subPolicy.getRateLimitTimeUnit(),
                subPolicy.isStopOnQuotaReach(),subPolicy.getGraphQLMaxDepth(),
                subPolicy.getGraphQLMaxComplexity(), subPolicy.getSubscriberCount());
        APIUtil.sendNotification(subscriptionPolicyEvent, APIConstants.NotifierType.POLICY.name());
    } else if (policy instanceof GlobalPolicy) {
        GlobalPolicy globalPolicy = (GlobalPolicy) policy;
        // getting key templates before updating database
        GlobalPolicy oldGlobalPolicy = apiMgtDAO.getGlobalPolicy(policy.getPolicyName());
        oldKeyTemplate = oldGlobalPolicy.getKeyTemplate();
        newKeyTemplate = globalPolicy.getKeyTemplate();
        apiMgtDAO.updateGlobalPolicy(globalPolicy);
        GlobalPolicy retrievedPolicy = apiMgtDAO.getGlobalPolicy(globalPolicy.getPolicyName());
        GlobalPolicyEvent globalPolicyEvent = new GlobalPolicyEvent(UUID.randomUUID().toString(),
                System.currentTimeMillis(), APIConstants.EventType.POLICY_UPDATE.name(), tenantId,
                globalPolicy.getTenantDomain(), retrievedPolicy.getPolicyId(),
                globalPolicy.getPolicyName());
        APIUtil.sendNotification(globalPolicyEvent, APIConstants.NotifierType.POLICY.name());
    } else {
        String msg = "Policy type " + policy.getClass().getName() + " is not supported";
        log.error(msg);
        throw new UnsupportedPolicyTypeException(msg);
    }
    //publishing keytemplate after update
    if (oldKeyTemplate != null && newKeyTemplate != null) {
        publishKeyTemplateEvent(oldKeyTemplate, "remove");
        publishKeyTemplateEvent(newKeyTemplate, "add");
    }
}

/** * @param username username to recognize tenant * @param level policy level to be applied * @return * @throws
APIManagementException
 */
public String[] getPolicyNames(String username, String level) throws APIManagementException {
    String[] policyNames = apiMgtDAO.getPolicyNames(level, username);
    return policyNames;
}

/**
 * Deletes a throttle policy, publishing a POLICY_DELETE event for the relevant level
 * before removing the policy from the database.
 *
 * @param username username to recognize the tenant
 * @param policyLevel policy level
 * @param policyName name of the policy to be deleted
 * @throws APIManagementException
 */
public void deletePolicy(String username, String policyLevel, String policyName) throws APIManagementException {
    int tenantID = APIUtil.getTenantId(username);
    if (PolicyConstants.POLICY_LEVEL_API.equals(policyLevel)) {
        //need to load whole policy object to get the pipelines
        APIPolicy policy = apiMgtDAO.getAPIPolicy(policyName, APIUtil.getTenantId(username));
        List<Integer> deletedConditionGroupIds = new ArrayList<>();
        for (Pipeline pipeline : policy.getPipelines()) {
            deletedConditionGroupIds.add(pipeline.getId());
        }
        APIPolicyEvent apiPolicyEvent = new APIPolicyEvent(UUID.randomUUID().toString(),
                System.currentTimeMillis(), APIConstants.EventType.POLICY_DELETE.name(), tenantId,
                policy.getTenantDomain(), policy.getPolicyId(), policy.getPolicyName(),
                policy.getDefaultQuotaPolicy().getType(), null, deletedConditionGroupIds);
        APIUtil.sendNotification(apiPolicyEvent, APIConstants.NotifierType.POLICY.name());
    } else if (PolicyConstants.POLICY_LEVEL_APP.equals(policyLevel)) {
        ApplicationPolicy appPolicy = apiMgtDAO.getApplicationPolicy(policyName, tenantID);
        ApplicationPolicyEvent applicationPolicyEvent = new ApplicationPolicyEvent(UUID.randomUUID().toString(),
                System.currentTimeMillis(), APIConstants.EventType.POLICY_DELETE.name(), tenantId,
                appPolicy.getTenantDomain(), appPolicy.getPolicyId(), appPolicy.getPolicyName(),
                appPolicy.getDefaultQuotaPolicy().getType());
        APIUtil.sendNotification(applicationPolicyEvent, APIConstants.NotifierType.POLICY.name());
    } else if (PolicyConstants.POLICY_LEVEL_SUB.equals(policyLevel)) {
        SubscriptionPolicy subscriptionPolicy =
apiMgtDAO.getSubscriptionPolicy(policyName, tenantID);
        //call the monetization extension point to delete plans if any
        deleteMonetizationPlan(subscriptionPolicy);
        SubscriptionPolicyEvent subscriptionPolicyEvent = new SubscriptionPolicyEvent(UUID.randomUUID().toString(),
                System.currentTimeMillis(), APIConstants.EventType.POLICY_DELETE.name(), tenantId,
                subscriptionPolicy.getTenantDomain(), subscriptionPolicy.getPolicyId(),
                subscriptionPolicy.getPolicyName(), subscriptionPolicy.getDefaultQuotaPolicy().getType(),
                subscriptionPolicy.getRateLimitCount(), subscriptionPolicy.getRateLimitTimeUnit(),
                subscriptionPolicy.isStopOnQuotaReach(), subscriptionPolicy.getGraphQLMaxDepth(),
                subscriptionPolicy.getGraphQLMaxComplexity(), subscriptionPolicy.getSubscriberCount());
        APIUtil.sendNotification(subscriptionPolicyEvent, APIConstants.NotifierType.POLICY.name());
    } else if (PolicyConstants.POLICY_LEVEL_GLOBAL.equals(policyLevel)) {
        GlobalPolicy globalPolicy = apiMgtDAO.getGlobalPolicy(policyName);
        GlobalPolicyEvent globalPolicyEvent = new GlobalPolicyEvent(UUID.randomUUID().toString(),
                System.currentTimeMillis(), APIConstants.EventType.POLICY_DELETE.name(), tenantId,
                globalPolicy.getTenantDomain(), globalPolicy.getPolicyId(),
                globalPolicy.getPolicyName());
        APIUtil.sendNotification(globalPolicyEvent, APIConstants.NotifierType.POLICY.name());
    }
    // Capture the global policy's key template before removal so the gateway can be told
    // to drop it after the database delete succeeds.
    GlobalPolicy globalPolicy = null;
    if (PolicyConstants.POLICY_LEVEL_GLOBAL.equals(policyLevel)) {
        globalPolicy = apiMgtDAO.getGlobalPolicy(policyName);
    }
    //remove from database
    apiMgtDAO.removeThrottlePolicy(policyLevel, policyName, tenantID);
    if (globalPolicy != null) {
        publishKeyTemplateEvent(globalPolicy.getKeyTemplate(), "remove");
    }
}

/** * Returns true if key template given by the global policy already exists.
* But this check will exclude the policy represented by the policy name
 *
 * @param policy Global policy
 * @return true if Global policy key template already exists
 */
public boolean isGlobalPolicyKeyTemplateExists(GlobalPolicy policy) throws APIManagementException {
    return apiMgtDAO.isKeyTemplatesExist(policy);
}

/**
 * Checks whether any subscription is attached to the given policy.
 */
public boolean hasAttachments(String username, String policyName, String policyType) throws APIManagementException {
    int tenantID = APIUtil.getTenantId(username);
    String tenantDomain = MultitenantUtils.getTenantDomain(username);
    // NOTE(review): for non-super tenants this passes only "@<tenantDomain>" (not the username
    // prefix) to the DAO — looks like a wildcard suffix match; confirm against hasSubscription().
    String tenantDomainWithAt = username;
    if (APIUtil.getSuperTenantId() != tenantID) {
        tenantDomainWithAt = "@" + tenantDomain;
    }
    boolean hasSubscription = apiMgtDAO.hasSubscription(policyName, tenantDomainWithAt, policyType);
    return hasSubscription;
}

/** Lists all block conditions of the current tenant. */
@Override
public List<BlockConditionsDTO> getBlockConditions() throws APIManagementException {
    return apiMgtDAO.getBlockConditions(tenantDomain);
}

/** Returns a block condition by its numeric id (may be null if absent — see DAO). */
@Override
public BlockConditionsDTO getBlockCondition(int conditionId) throws APIManagementException {
    return apiMgtDAO.getBlockCondition(conditionId);
}

/** Returns a block condition by UUID; raises a not-found error when missing. */
@Override
public BlockConditionsDTO getBlockConditionByUUID(String uuid) throws APIManagementException {
    BlockConditionsDTO blockCondition = apiMgtDAO.getBlockConditionByUUID(uuid);
    if (blockCondition == null) {
        handleBlockConditionNotFoundException("Block condition: " + uuid + " was not found.");
    }
    return blockCondition;
}

/** Updates the enabled state of a block condition and publishes the change. */
@Override
public boolean updateBlockCondition(int conditionId, String state) throws APIManagementException {
    boolean updateState = apiMgtDAO.updateBlockConditionState(conditionId, state);
    BlockConditionsDTO blockConditionsDTO = apiMgtDAO.getBlockCondition(conditionId);
    if (updateState) {
        publishBlockingEventUpdate(blockConditionsDTO);
    }
    return updateState;
}

/** Updates the enabled state of a block condition (looked up by UUID) and publishes the change. */
@Override
public boolean updateBlockConditionByUUID(String uuid, String state) throws APIManagementException {
    boolean updateState = apiMgtDAO.updateBlockConditionStateByUUID(uuid, state);
    BlockConditionsDTO
blockConditionsDTO = apiMgtDAO.getBlockConditionByUUID(uuid);
    if (updateState && blockConditionsDTO != null) {
        publishBlockingEventUpdate(blockConditionsDTO);
    }
    return updateState;
}

/**
 * Adds an enabled block condition. For USER conditions the value is normalised to the
 * tenant-qualified form ("user@tenantDomain").
 *
 * @return the UUID of the created block condition
 */
@Override
public String addBlockCondition(String conditionType, String conditionValue) throws APIManagementException {
    if (APIConstants.BLOCKING_CONDITIONS_USER.equals(conditionType)) {
        conditionValue = MultitenantUtils.getTenantAwareUsername(conditionValue);
        conditionValue = conditionValue + "@" + tenantDomain;
    }
    BlockConditionsDTO blockConditionsDTO = new BlockConditionsDTO();
    blockConditionsDTO.setConditionType(conditionType);
    blockConditionsDTO.setConditionValue(conditionValue);
    blockConditionsDTO.setTenantDomain(tenantDomain);
    blockConditionsDTO.setEnabled(true);
    blockConditionsDTO.setUUID(UUID.randomUUID().toString());
    BlockConditionsDTO createdBlockConditionsDto = apiMgtDAO.addBlockConditions(blockConditionsDTO);
    if (createdBlockConditionsDto != null) {
        publishBlockingEvent(createdBlockConditionsDto, "true");
    }
    // NOTE(review): the null check above guards only the publish — getUUID() here would NPE
    // if the DAO ever returned null; confirm addBlockConditions never returns null.
    return createdBlockConditionsDto.getUUID();
}

/**
 * Adds a block condition with an explicit enabled/disabled status.
 *
 * @return the UUID of the created block condition
 */
@Override
public String addBlockCondition(String conditionType, String conditionValue, boolean conditionStatus)
        throws APIManagementException {
    if (APIConstants.BLOCKING_CONDITIONS_USER.equals(conditionType)) {
        conditionValue = MultitenantUtils.getTenantAwareUsername(conditionValue);
        conditionValue = conditionValue + "@" + tenantDomain;
    }
    BlockConditionsDTO blockConditionsDTO = new BlockConditionsDTO();
    blockConditionsDTO.setConditionType(conditionType);
    blockConditionsDTO.setConditionValue(conditionValue);
    blockConditionsDTO.setTenantDomain(tenantDomain);
    blockConditionsDTO.setEnabled(conditionStatus);
    blockConditionsDTO.setUUID(UUID.randomUUID().toString());
    BlockConditionsDTO createdBlockConditionsDto = apiMgtDAO.addBlockConditions(blockConditionsDTO);
    if (createdBlockConditionsDto != null) {
        publishBlockingEvent(createdBlockConditionsDto, "true");
    }
    return createdBlockConditionsDto.getUUID();
}

@Override
public boolean
deleteBlockCondition(int conditionId) throws APIManagementException {
    // Fetch first so the condition can be unpublished from gateways after a successful delete.
    BlockConditionsDTO blockCondition = apiMgtDAO.getBlockCondition(conditionId);
    boolean deleteState = apiMgtDAO.deleteBlockCondition(conditionId);
    if (deleteState && blockCondition != null) {
        unpublishBlockCondition(blockCondition);
    }
    return deleteState;
}

/** Deletes a block condition identified by UUID; no-op (returns false) when it does not exist. */
@Override
public boolean deleteBlockConditionByUUID(String uuid) throws APIManagementException {
    boolean deleteState = false;
    BlockConditionsDTO blockCondition = apiMgtDAO.getBlockConditionByUUID(uuid);
    if (blockCondition != null) {
        deleteState = apiMgtDAO.deleteBlockCondition(blockCondition.getConditionId());
        if (deleteState) {
            unpublishBlockCondition(blockCondition);
        }
    }
    return deleteState;
}

/**
 * Unpublish a blocking condition.
 *
 * @param blockCondition Block Condition object
 */
private void unpublishBlockCondition(BlockConditionsDTO blockCondition) {
    String blockingConditionType = blockCondition.getConditionType();
    String blockingConditionValue = blockCondition.getConditionValue();
    if (APIConstants.BLOCKING_CONDITIONS_USER.equalsIgnoreCase(blockingConditionType)) {
        // Re-qualify the username with the tenant domain before broadcasting the delete.
        blockingConditionValue = MultitenantUtils.getTenantAwareUsername(blockingConditionValue);
        blockingConditionValue = blockingConditionValue + "@" + tenantDomain;
        blockCondition.setConditionValue(blockingConditionValue);
    }
    publishBlockingEvent(blockCondition, "delete");
}

/** Returns the advanced (API-level) policy of the given user's tenant by name. */
@Override
public APIPolicy getAPIPolicy(String username, String policyName) throws APIManagementException {
    return apiMgtDAO.getAPIPolicy(policyName, APIUtil.getTenantId(username));
}

/** Returns the advanced policy by UUID; raises a not-found error when missing. */
@Override
public APIPolicy getAPIPolicyByUUID(String uuid) throws APIManagementException {
    APIPolicy policy = apiMgtDAO.getAPIPolicyByUUID(uuid);
    if (policy == null) {
        handlePolicyNotFoundException("Advanced Policy: " + uuid + " was not found.");
    }
    return policy;
}

/** Returns the application policy of the given user's tenant by name. */
@Override
public ApplicationPolicy getApplicationPolicy(String username, String policyName) throws APIManagementException {
    return apiMgtDAO.getApplicationPolicy(policyName,
APIUtil.getTenantId(username));
}

/** Returns the application policy by UUID; raises a not-found error when missing. */
@Override
public ApplicationPolicy getApplicationPolicyByUUID(String uuid) throws APIManagementException {
    ApplicationPolicy policy = apiMgtDAO.getApplicationPolicyByUUID(uuid);
    if (policy == null) {
        handlePolicyNotFoundException("Application Policy: " + uuid + " was not found.");
    }
    return policy;
}

/** Returns the subscription policy of the given user's tenant by name. */
@Override
public SubscriptionPolicy getSubscriptionPolicy(String username, String policyName) throws APIManagementException {
    return apiMgtDAO.getSubscriptionPolicy(policyName, APIUtil.getTenantId(username));
}

/** Returns the subscription policy by UUID; raises a not-found error when missing. */
@Override
public SubscriptionPolicy getSubscriptionPolicyByUUID(String uuid) throws APIManagementException {
    SubscriptionPolicy policy = apiMgtDAO.getSubscriptionPolicyByUUID(uuid);
    if (policy == null) {
        handlePolicyNotFoundException("Subscription Policy: " + uuid + " was not found.");
    }
    return policy;
}

/** Returns the global policy by name. */
@Override
public GlobalPolicy getGlobalPolicy(String policyName) throws APIManagementException {
    return apiMgtDAO.getGlobalPolicy(policyName);
}

/** Returns the global policy by UUID; raises a not-found error when missing. */
@Override
public GlobalPolicy getGlobalPolicyByUUID(String uuid) throws APIManagementException {
    GlobalPolicy policy = apiMgtDAO.getGlobalPolicyByUUID(uuid);
    if (policy == null) {
        handlePolicyNotFoundException("Global Policy: " + uuid + " was not found.");
    }
    return policy;
}

/** * Publishes the changes on blocking conditions.
*
 * @param blockCondition Block Condition object
 * @throws APIManagementException
 */
private void publishBlockingEventUpdate(BlockConditionsDTO blockCondition) throws APIManagementException {
    if (blockCondition != null) {
        String blockingConditionType = blockCondition.getConditionType();
        String blockingConditionValue = blockCondition.getConditionValue();
        if (APIConstants.BLOCKING_CONDITIONS_USER.equalsIgnoreCase(blockingConditionType)) {
            // USER conditions are broadcast in tenant-qualified form ("user@tenantDomain").
            blockingConditionValue = MultitenantUtils.getTenantAwareUsername(blockingConditionValue);
            blockingConditionValue = blockingConditionValue + "@" + tenantDomain;
            blockCondition.setConditionValue(blockingConditionValue);
        }
        publishBlockingEvent(blockCondition, Boolean.toString(blockCondition.isEnabled()));
    }
}

/**
 * Publishes the changes on blocking conditions.
 * @param blockConditionsDTO Blockcondition Dto event
 */
private void publishBlockingEvent(BlockConditionsDTO blockConditionsDTO, String state) {
    String conditionType = blockConditionsDTO.getConditionType();
    String conditionValue = blockConditionsDTO.getConditionValue();
    if (APIConstants.BLOCKING_CONDITIONS_IP.equals(conditionType) ||
            APIConstants.BLOCK_CONDITION_IP_RANGE.equals(conditionType)) {
        // IP values may contain characters that must be escaped for the event payload.
        conditionValue = StringEscapeUtils.escapeJava(conditionValue);
    }
    Object[] objects = new Object[]{blockConditionsDTO.getConditionId(), blockConditionsDTO.getConditionType(),
            conditionValue, state, tenantDomain};
    Event blockingMessage = new Event(APIConstants.BLOCKING_CONDITIONS_STREAM_ID, System.currentTimeMillis(),
            null, null, objects);
    ThrottleProperties throttleProperties = getAPIManagerConfiguration().getThrottleProperties();
    if (throttleProperties.getDataPublisher() != null && throttleProperties.getDataPublisher().isEnabled()) {
        APIUtil.publishEventToTrafficManager(Collections.EMPTY_MAP, blockingMessage);
    }
}

/** Publishes a key-template add/remove event to the traffic manager (when publishing is enabled). */
private void publishKeyTemplateEvent(String templateValue, String state) {
    Object[] objects = new Object[]{templateValue,state};
    Event keyTemplateMessage = new
Event(APIConstants.KEY_TEMPLATE_STREM_ID, System.currentTimeMillis(), null, null, objects);
    ThrottleProperties throttleProperties = getAPIManagerConfiguration().getThrottleProperties();
    if (throttleProperties.getDataPublisher() != null && throttleProperties.getDataPublisher().isEnabled()) {
        APIUtil.publishEventToTrafficManager(Collections.EMPTY_MAP, keyTemplateMessage);
    }
}

/**
 * Returns the full lifecycle configuration for the given tenant, starting a tenant flow
 * when the tenant is not the super tenant. Returns null on parse/registry errors
 * (handleException rethrows before the return is reached).
 */
public String getLifecycleConfiguration(String tenantDomain) throws APIManagementException {
    boolean isTenantFlowStarted = false;
    try {
        if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
            isTenantFlowStarted = true;
            PrivilegedCarbonContext.startTenantFlow();
            PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
        }
        return APIUtil.getFullLifeCycleData(configRegistry);
    } catch (XMLStreamException e) {
        handleException("Parsing error while getting the lifecycle configuration content.", e);
        return null;
    } catch (RegistryException e) {
        handleException("Registry error while getting the lifecycle configuration content.", e);
        return null;
    } finally {
        if (isTenantFlowStarted) {
            PrivilegedCarbonContext.endTenantFlow();
        }
    }
}

/** Returns the external workflow reference for the given subscription id. */
public String getExternalWorkflowReferenceId(int subscriptionId) throws APIManagementException {
    return apiMgtDAO.getExternalWorkflowReferenceForSubscription(subscriptionId);
}

/**
 * Adds an endpoint certificate for the user's tenant and notifies gateways
 * with an ENDPOINT_CERTIFICATE_ADD event.
 *
 * @return the certificate manager response code
 */
@Override
public int addCertificate(String userName, String certificate, String alias, String endpoint)
        throws APIManagementException {
    ResponseCode responseCode = ResponseCode.INTERNAL_SERVER_ERROR;
    String tenantDomain = MultitenantUtils.getTenantDomain(userName);
    try {
        int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                .getTenantId(tenantDomain);
        responseCode = certificateManager
                .addCertificateToParentNode(certificate, alias, endpoint, tenantId);
        CertificateEvent certificateEvent = new CertificateEvent(UUID.randomUUID().toString(),
System.currentTimeMillis(),APIConstants.EventType.ENDPOINT_CERTIFICATE_ADD.toString(),
                tenantDomain,alias,endpoint);
        APIUtil.sendNotification(certificateEvent, APIConstants.NotifierType.CERTIFICATE.name());
    } catch (UserStoreException e) {
        handleException("Error while reading tenant information", e);
    }
    return responseCode.getResponseCode();
}

/**
 * Adds a client (mutual-SSL) certificate for the given API.
 *
 * @return the certificate manager response code
 */
@Override
public int addClientCertificate(String userName, APIIdentifier apiIdentifier, String certificate, String alias,
        String tierName, String organization) throws APIManagementException {
    ResponseCode responseCode = ResponseCode.INTERNAL_SERVER_ERROR;
    String tenantDomain = MultitenantUtils.getTenantDomain(userName);
    try {
        int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                .getTenantId(tenantDomain);
        responseCode = certificateManager
                .addClientCertificate(apiIdentifier, certificate, alias, tierName, tenantId, organization);
    } catch (UserStoreException e) {
        handleException("Error while reading tenant information, client certificate addition failed for the API "
                + apiIdentifier.toString(), e);
    }
    return responseCode.getResponseCode();
}

/**
 * Deletes an endpoint certificate and notifies gateways with an
 * ENDPOINT_CERTIFICATE_REMOVE event.
 *
 * @return the certificate manager response code
 */
@Override
public int deleteCertificate(String userName, String alias, String endpoint) throws APIManagementException {
    ResponseCode responseCode = ResponseCode.INTERNAL_SERVER_ERROR;
    String tenantDomain = MultitenantUtils.getTenantDomain(userName);
    try {
        int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                .getTenantId(tenantDomain);
        responseCode = certificateManager.deleteCertificateFromParentNode(alias, endpoint, tenantId);
        CertificateEvent certificateEvent = new CertificateEvent(UUID.randomUUID().toString(),
                System.currentTimeMillis(), APIConstants.EventType.ENDPOINT_CERTIFICATE_REMOVE.toString(),
                tenantDomain, alias, endpoint);
        APIUtil.sendNotification(certificateEvent, APIConstants.NotifierType.CERTIFICATE.name());
    } catch (UserStoreException e) {
        handleException("Error while reading tenant information", e);
    }
    return
responseCode.getResponseCode();
}

/**
 * Deletes a client certificate of the given API.
 *
 * @return the certificate manager response code
 */
@Override
public int deleteClientCertificate(String userName, APIIdentifier apiIdentifier, String alias)
        throws APIManagementException {
    ResponseCode responseCode = ResponseCode.INTERNAL_SERVER_ERROR;
    String tenantDomain = MultitenantUtils.getTenantDomain(userName);
    try {
        int tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                .getTenantId(tenantDomain);
        responseCode = certificateManager.deleteClientCertificateFromParentNode(apiIdentifier, alias, tenantId);
    } catch (UserStoreException e) {
        handleException(
                "Error while reading tenant information while trying to delete client certificate with alias "
                        + alias + " for the API " + apiIdentifier.toString(), e);
    }
    return responseCode.getResponseCode();
}

/** Whether the certificate manager is configured for this deployment. */
@Override
public boolean isConfigured() {
    return certificateManager.isConfigured();
}

/** Lists endpoint certificates of the current tenant. */
@Override
public List<CertificateMetadataDTO> getCertificates(String userName) throws APIManagementException {
    // NOTE(review): userName is not used here — the tenant id is resolved from the
    // instance's tenantDomain field; confirm that is intentional.
    int tenantId = 0;
    try {
        tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                .getTenantId(tenantDomain);
    } catch (UserStoreException e) {
        handleException("Error while reading tenant information", e);
    }
    return certificateManager.getCertificates(tenantId);
}

/** Searches endpoint certificates by alias and/or endpoint within a tenant. */
@Override
public List<CertificateMetadataDTO> searchCertificates(int tenantId, String alias, String endpoint)
        throws APIManagementException {
    return certificateManager.getCertificates(tenantId, alias, endpoint);
}

/** Searches client certificates by alias and API within a tenant/organization. */
@Override
public List<ClientCertificateDTO> searchClientCertificates(int tenantId, String alias,
        APIIdentifier apiIdentifier, String organization) throws APIManagementException {
    return certificateManager.searchClientCertificates(tenantId, alias, apiIdentifier, organization);
}

/** Searches client certificates for an API product by converting its identifier to an API identifier. */
@Override
public List<ClientCertificateDTO> searchClientCertificates(int tenantId, String alias,
        APIProductIdentifier apiProductIdentifier, String organization) throws APIManagementException {
    APIIdentifier apiIdentifier = new
APIIdentifier(apiProductIdentifier.getProviderName(), apiProductIdentifier.getName(),
            apiProductIdentifier.getVersion());
    return certificateManager.searchClientCertificates(tenantId, alias, apiIdentifier, organization);
}

/** Whether a certificate with the given alias exists in the tenant. */
@Override
public boolean isCertificatePresent(int tenantId, String alias) throws APIManagementException {
    return certificateManager.isCertificatePresent(tenantId, alias);
}

/** Returns the first client certificate matching the alias within the organization, or null. */
@Override
public ClientCertificateDTO getClientCertificate(int tenantId, String alias, String organization)
        throws APIManagementException {
    List<ClientCertificateDTO> clientCertificateDTOS = certificateManager
            .searchClientCertificates(tenantId, alias, null, organization);
    if (clientCertificateDTOS != null && clientCertificateDTOS.size() > 0) {
        return clientCertificateDTOS.get(0);
    }
    return null;
}

/** Returns the first client certificate matching the alias for the given API, or null. */
@Override
public ClientCertificateDTO getClientCertificate(int tenantId, String alias, APIIdentifier apiIdentifier,
        String organization) throws APIManagementException {
    List<ClientCertificateDTO> clientCertificateDTOS = certificateManager
            .searchClientCertificates(tenantId, alias, apiIdentifier, organization);
    if (clientCertificateDTOS != null && clientCertificateDTOS.size() > 0) {
        return clientCertificateDTOS.get(0);
    }
    return null;
}

/** Returns metadata (validity, etc. — see CertificateManager) for the aliased certificate. */
@Override
public CertificateInformationDTO getCertificateStatus(String alias) throws APIManagementException {
    return certificateManager.getCertificateInformation(alias);
}

/**
 * Updates an endpoint certificate and, on success, notifies gateways with an
 * ENDPOINT_CERTIFICATE_UPDATE event.
 *
 * @return the certificate manager response code (500 when the manager returned null)
 */
@Override
public int updateCertificate(String certificateString, String alias) throws APIManagementException {
    ResponseCode responseCode = certificateManager.updateCertificate(certificateString, alias);
    if (responseCode != null && responseCode.getResponseCode() == ResponseCode.SUCCESS.getResponseCode()) {
        CertificateEvent certificateEvent = new CertificateEvent(UUID.randomUUID().toString(),
                System.currentTimeMillis(), APIConstants.EventType.ENDPOINT_CERTIFICATE_UPDATE.toString(),
                tenantDomain, alias);
        APIUtil.sendNotification(certificateEvent,
APIConstants.NotifierType.CERTIFICATE.name());
    }
    return responseCode != null ? responseCode.getResponseCode() :
            ResponseCode.INTERNAL_SERVER_ERROR.getResponseCode();
}

/**
 * Updates a client certificate.
 *
 * @return the certificate manager response code (500 when the manager returned null)
 */
@Override
public int updateClientCertificate(String certificate, String alias, APIIdentifier apiIdentifier,
        String tier, int tenantId, String organization) throws APIManagementException {
    ResponseCode responseCode = certificateManager
            .updateClientCertificate(certificate, alias, tier, tenantId, organization);
    return responseCode != null ? responseCode.getResponseCode() :
            ResponseCode.INTERNAL_SERVER_ERROR.getResponseCode();
}

/** Number of endpoint certificates in the tenant. */
@Override
public int getCertificateCountPerTenant(int tenantId) throws APIManagementException {
    return certificateManager.getCertificateCount(tenantId);
}

/** Number of client certificates in the tenant. */
@Override
public int getClientCertificateCount(int tenantId) throws APIManagementException {
    return certificateManager.getClientCertificateCount(tenantId);
}

/** Raw content of the aliased certificate as a stream. */
@Override
public ByteArrayInputStream getCertificateContent(String alias) throws APIManagementException {
    return certificateManager.getCertificateContent(alias);
}

/**
 * Get the workflow status information for the given api for the given workflow type
 *
 * @param uuid Api uuid
 * @param workflowType workflow type
 * @return WorkflowDTO
 * @throws APIManagementException
 */
public WorkflowDTO getAPIWorkflowStatus(String uuid, String workflowType) throws APIManagementException {
    return APIUtil.getAPIWorkflowStatus(uuid, workflowType);
}

/** Deletes any pending API state-change workflow task associated with the given API uuid. */
@Override
public void deleteWorkflowTask(String uuid) throws APIManagementException {
    int apiId;
    try {
        apiId = apiMgtDAO.getAPIID(uuid);
        cleanUpPendingAPIStateChangeTask(apiId);
    } catch (APIManagementException e) {
        handleException("Error while deleting the workflow task.", e);
    } catch (WorkflowException e) {
        handleException("Error while deleting the workflow task.", e);
    }
}

private void cleanUpPendingAPIStateChangeTask(int apiId) throws WorkflowException, APIManagementException {
    //Run cleanup task for workflow
    WorkflowExecutor
apiStateChangeWFExecutor = getWorkflowExecutor(WorkflowConstants.WF_TYPE_AM_API_STATE);
    WorkflowDTO wfDTO = apiMgtDAO.retrieveWorkflowFromInternalReference(Integer.toString(apiId),
            WorkflowConstants.WF_TYPE_AM_API_STATE);
    // Only a workflow still in CREATED state has a pending task worth cleaning up.
    if (wfDTO != null && WorkflowStatus.CREATED == wfDTO.getStatus()) {
        apiStateChangeWFExecutor.cleanUpPendingTask(wfDTO.getExternalWorkflowReference());
    }
}

/**
 * Clean-up pending subscriptions of a given API
 *
 * @param uuid API uuid
 * @throws APIManagementException
 */
private void cleanUpPendingSubscriptionCreationProcessesByAPI(String uuid) throws APIManagementException {
    WorkflowExecutor createSubscriptionWFExecutor = getWorkflowExecutor(
            WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
    Set<Integer> pendingSubscriptions = apiMgtDAO.getPendingSubscriptionsByAPIId(uuid);
    String workflowExtRef = null;
    for (int subscription : pendingSubscriptions) {
        try {
            workflowExtRef = apiMgtDAO.getExternalWorkflowReferenceForSubscription(subscription);
            createSubscriptionWFExecutor.cleanUpPendingTask(workflowExtRef);
        } catch (APIManagementException ex) {
            // failed clean-up processes are ignored to prevent failures in API state change flow
            log.warn("Failed to retrieve external workflow reference for subscription for subscription ID: "
                    + subscription);
        } catch (WorkflowException ex) {
            // failed clean-up processes are ignored to prevent failures in API state change flow
            log.warn("Failed to clean-up pending subscription approval task for subscription ID: " + subscription);
        }
    }
}

/**
 * Returns the given workflow executor
 *
 * @param workflowType Workflow executor type
 * @return WorkflowExecutor of given type
 * @throws WorkflowException if an error occurred while getting WorkflowExecutor
 */
protected WorkflowExecutor getWorkflowExecutor(String workflowType) throws APIManagementException {
    try {
        return WorkflowExecutorFactory.getInstance().getWorkflowExecutor(workflowType);
    } catch (WorkflowException e) {
        handleException("Error while obtaining WorkflowExecutor
instance for workflow type :" + workflowType);
    }
    return null;
}

/** Undeploys an API product from gateways it was removed from (excluding those being re-added). */
protected void removeFromGateway(APIProduct apiProduct, String tenantDomain,
        Set<APIRevisionDeployment> gatewaysToRemove, Set<String> gatewaysToAdd)
        throws APIManagementException {
    APIGatewayManager gatewayManager = APIGatewayManager.getInstance();
    Set<API> associatedAPIs = getAssociatedAPIs(apiProduct);
    Set<String> environmentsToRemove = new HashSet<>();
    for (APIRevisionDeployment apiRevisionDeployment : gatewaysToRemove) {
        environmentsToRemove.add(apiRevisionDeployment.getDeployment());
    }
    // Do not undeploy from environments the product is simultaneously being deployed to.
    environmentsToRemove.removeAll(gatewaysToAdd);
    gatewayManager.unDeployFromGateway(apiProduct, tenantDomain, associatedAPIs, environmentsToRemove);
}

/** Resolves the numeric tenant id for a tenant domain. */
protected int getTenantId(String tenantDomain) throws UserStoreException {
    return ServiceReferenceHolder.getInstance().getRealmService().getTenantManager().getTenantId(tenantDomain);
}

// NOTE(review): method name contains a typo ("Asnc" → "Async") but renaming would break callers.
protected void sendAsncNotification(NotificationDTO notificationDTO) throws NotificationException {
    new NotificationExecutor().sendAsyncNotifications(notificationDTO);
}

/** Invalidates the gateway resource cache for the given context/version/resources. */
protected void invalidateResourceCache(String apiContext, String apiVersion,Set<URITemplate> uriTemplates) {
    APIAuthenticationAdminClient client = new APIAuthenticationAdminClient();
    client.invalidateResourceCache(apiContext, apiVersion, uriTemplates);
}

/**
 * To add API/Product roles restrictions and add additional properties.
 *
 * @param artifactPath Path of the API/Product artifact.
 * @param publisherAccessControlRoles Role specified for the publisher access control.
 * @param publisherAccessControl Publisher Access Control restriction.
 * @param additionalProperties Additional properties that is related with an API/Product.
 * @throws RegistryException Registry Exception.
*/ private void updateRegistryResources(String artifactPath, String publisherAccessControlRoles, String publisherAccessControl, Map<String, String> additionalProperties) throws RegistryException { publisherAccessControlRoles = (publisherAccessControlRoles == null || publisherAccessControlRoles.trim() .isEmpty()) ? APIConstants.NULL_USER_ROLE_LIST : publisherAccessControlRoles; if (publisherAccessControlRoles.equalsIgnoreCase(APIConstants.NULL_USER_ROLE_LIST)) { publisherAccessControl = APIConstants.NO_ACCESS_CONTROL; } if (!registry.resourceExists(artifactPath)) { return; } Resource apiResource = registry.get(artifactPath); if (apiResource != null) { if (additionalProperties != null) { // Removing all the properties, before updating new properties. Properties properties = apiResource.getProperties(); if (properties != null) { Enumeration propertyNames = properties.propertyNames(); while (propertyNames.hasMoreElements()) { String propertyName = (String) propertyNames.nextElement(); if (propertyName.startsWith(APIConstants.API_RELATED_CUSTOM_PROPERTIES_PREFIX)) { apiResource.removeProperty(propertyName); } } } } // We are changing to lowercase, as registry search only supports lower-case characters. apiResource.setProperty(APIConstants.PUBLISHER_ROLES, publisherAccessControlRoles.toLowerCase()); // This property will be only used for display proposes in the Publisher UI so that the original case of // the roles that were specified can be maintained. 
apiResource.setProperty(APIConstants.DISPLAY_PUBLISHER_ROLES, publisherAccessControlRoles); apiResource.setProperty(APIConstants.ACCESS_CONTROL, publisherAccessControl); apiResource.removeProperty(APIConstants.CUSTOM_API_INDEXER_PROPERTY); if (additionalProperties != null && additionalProperties.size() != 0) { for (Map.Entry<String, String> entry : additionalProperties.entrySet()) { apiResource.setProperty( (APIConstants.API_RELATED_CUSTOM_PROPERTIES_PREFIX + entry.getKey()), entry.getValue()); } } registry.put(artifactPath, apiResource); } } /** * To get the query to retrieve user role list query based on current role list. * * @return the query with user role list. * @throws APIManagementException API Management Exception. */ private String getUserRoleListQuery() throws APIManagementException { StringBuilder rolesQuery = new StringBuilder(); rolesQuery.append('('); rolesQuery.append(APIConstants.NULL_USER_ROLE_LIST); String[] userRoles = APIUtil.getListOfRoles(userNameWithoutChange); String skipRolesByRegex = APIUtil.getSkipRolesByRegex(); if (StringUtils.isNotEmpty(skipRolesByRegex)) { List<String> filteredUserRoles = new ArrayList<>(Arrays.asList(userRoles)); String[] regexList = skipRolesByRegex.split(","); for (int i = 0; i < regexList.length; i++) { Pattern p = Pattern.compile(regexList[i]); Iterator<String> itr = filteredUserRoles.iterator(); while(itr.hasNext()) { String role = itr.next(); Matcher m = p.matcher(role); if (m.matches()) { itr.remove(); } } } userRoles = filteredUserRoles.toArray(new String[0]); } if (userRoles != null) { for (String userRole : userRoles) { rolesQuery.append(" OR "); rolesQuery.append(ClientUtils.escapeQueryChars(APIUtil.sanitizeUserRole(userRole.toLowerCase()))); } } rolesQuery.append(")"); if(log.isDebugEnabled()) { log.debug("User role list solr query " + APIConstants.PUBLISHER_ROLES + "=" + rolesQuery.toString()); } return APIConstants.PUBLISHER_ROLES + "=" + rolesQuery.toString(); } @Override protected String 
getSearchQuery(String searchQuery) throws APIManagementException { if (!isAccessControlRestrictionEnabled || APIUtil.hasPermission(userNameWithoutChange, APIConstants.Permissions .APIM_ADMIN)) { return searchQuery; } String criteria = getUserRoleListQuery(); if (searchQuery != null && !searchQuery.trim().isEmpty()) { criteria = criteria + "&" + searchQuery; } return criteria; } /** * Method to get the user specified mediation sequence. * * @param apiIdentifier : The identifier of the api. * @param type : Mediation type. {in, out, fault} * @param name : The name of the sequence that needed. * @return : The content of the mediation sequence. */ public String getSequenceFileContent(APIIdentifier apiIdentifier, String type, String name) throws APIManagementException { Resource requiredSequence; InputStream sequenceStream; String sequenceText = ""; try { if (apiIdentifier != null && type != null && name != null) { if (log.isDebugEnabled()) { log.debug("Check the default " + type + "sequences for " + name); } requiredSequence = getDefaultSequence(type, name); if (requiredSequence == null) { if (log.isDebugEnabled()) { log.debug("Check the custom " + type +" sequences for " + name); } requiredSequence = getCustomSequence(apiIdentifier, type, name); } //Convert the content stream to a string. 
if (requiredSequence != null) { sequenceStream = requiredSequence.getContentStream(); StringWriter stringWriter = new StringWriter(); IOUtils.copy(sequenceStream, stringWriter); sequenceText = stringWriter.toString(); } else { log.error("No sequence for the name " + name + "is found!"); } } else { log.error("Invalid arguments."); } } catch (APIManagementException e) { log.error(e.getMessage()); throw new APIManagementException(e); } catch (RegistryException e) { log.error(e.getMessage()); throw new APIManagementException(e); } catch (IOException e) { log.error(e.getMessage()); throw new APIManagementException(e); } return sequenceText; } /** * Get the mediation sequence which matches the given type and name from the custom sequences. * * @param type : The sequence type. * @param name : The name of the sequence. * @return : The mediation sequence which matches the given parameters. Returns null if no matching sequence is * found. */ private Resource getDefaultSequence(String type, String name) throws APIManagementException { String defaultSequenceFileLocation = ""; try { UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); if (APIConstants.FAULT_SEQUENCE.equals(type)) { defaultSequenceFileLocation = APIConstants.API_CUSTOM_FAULTSEQUENCE_LOCATION; } else if (APIConstants.OUT_SEQUENCE.equals(type)) { defaultSequenceFileLocation = APIConstants.API_CUSTOM_OUTSEQUENCE_LOCATION; } else { defaultSequenceFileLocation = APIConstants.API_CUSTOM_INSEQUENCE_LOCATION; } if (registry.resourceExists(defaultSequenceFileLocation)) { org.wso2.carbon.registry.api.Collection defaultSeqCollection = (org.wso2.carbon.registry.api.Collection) registry.get(defaultSequenceFileLocation); if (defaultSeqCollection != null) { String[] faultSeqChildPaths = defaultSeqCollection.getChildren(); for (String defaultSeqChildPath : faultSeqChildPaths) { Resource defaultSequence = registry.get(defaultSeqChildPath); OMElement seqElement 
= APIUtil.buildOMElement(defaultSequence.getContentStream()); if (name.equals(seqElement.getAttributeValue(new QName("name")))) { return defaultSequence; } } } } } catch (RegistryException e) { throw new APIManagementException("Error while retrieving registry for tenant " + tenantId, e); } catch (org.wso2.carbon.registry.api.RegistryException e) { throw new APIManagementException("Error while processing the " + defaultSequenceFileLocation + " in the registry", e); } catch (Exception e) { throw new APIManagementException("Error while building the OMElement from the sequence " + name, e); } return null; } /** * Get the resource which matches the user selected resource type and the name from the custom uploaded sequences. * * @param identifier : The API Identifier. * @param type : The sequence type. * @return : Resource object which matches the parameters. If no resource found, return null. */ private Resource getCustomSequence(APIIdentifier identifier, String type, String name) throws APIManagementException { Resource customSequence = null; boolean isTenantFlowStarted = false; try { String tenantDomain = null; if (identifier.getProviderName().contains("-AT-")) { String provider = identifier.getProviderName().replace("-AT-", "@"); tenantDomain = MultitenantUtils.getTenantDomain(provider); } if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { PrivilegedCarbonContext.startTenantFlow(); isTenantFlowStarted = true; } if (!StringUtils.isEmpty(tenantDomain)) { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true); } else { PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME, true); } UserRegistry registry = ServiceReferenceHolder.getInstance().getRegistryService() .getGovernanceSystemRegistry(tenantId); String customSeqFileLocation = ""; if (APIConstants.FAULT_SEQUENCE.equals(type)) { customSeqFileLocation = APIUtil.getSequencePath(identifier, 
APIConstants.API_CUSTOM_SEQUENCE_TYPE_FAULT); } else if (APIConstants.OUT_SEQUENCE.equals(type)) { customSeqFileLocation = APIUtil.getSequencePath(identifier, APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT); } else { customSeqFileLocation = APIUtil.getSequencePath(identifier, APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN); } if (registry.resourceExists(customSeqFileLocation)) { org.wso2.carbon.registry.api.Collection customSeqCollection = (org.wso2.carbon.registry.api.Collection) registry.get(customSeqFileLocation); if (customSeqCollection != null) { String[] faultSeqChildPaths = customSeqCollection.getChildren(); for (String customSeqChildPath : faultSeqChildPaths) { customSequence = registry.get(customSeqChildPath); OMElement seqElement = APIUtil.buildOMElement(customSequence.getContentStream()); if (name.equals(seqElement.getAttributeValue(new QName("name")))) { return customSequence; } } } } } catch (RegistryException e) { throw new APIManagementException("Error while retrieving registry for tenant " + tenantId, e); } catch (org.wso2.carbon.registry.api.RegistryException e) { throw new APIManagementException("Error while processing the " + type + " sequences of " + identifier + " in the registry", e); } catch (Exception e) { throw new APIManagementException("Error while building the OMElement from the sequence " + name, e); } finally { if (isTenantFlowStarted) { PrivilegedCarbonContext.endTenantFlow(); } } return null; } /* To check authorization of the API against current logged in user. If the user is not authorized an exception * will be thrown. * * @param identifier API identifier * @throws APIManagementException APIManagementException */ protected void checkAccessControlPermission(Identifier identifier) throws APIManagementException { if (identifier == null || !isAccessControlRestrictionEnabled) { if (!isAccessControlRestrictionEnabled && log.isDebugEnabled()) { log.debug("Publisher access control restriction is not enabled. 
Hence the API " + identifier + " can be editable and viewable by all the API publishers and creators."); } return; } String resourcePath = StringUtils.EMPTY; String identifierType = StringUtils.EMPTY; if (identifier instanceof APIIdentifier) { resourcePath = APIUtil.getAPIPath((APIIdentifier) identifier); identifierType = APIConstants.API_IDENTIFIER_TYPE; } else if (identifier instanceof APIProductIdentifier) { resourcePath = APIUtil.getAPIProductPath((APIProductIdentifier) identifier); identifierType = APIConstants.API_PRODUCT_IDENTIFIER_TYPE; } try { Registry sysRegistry = getRegistryService().getGovernanceSystemRegistry(); // Need user name with tenant domain to get correct domain name from // MultitenantUtils.getTenantDomain(username) String userNameWithTenantDomain = (userNameWithoutChange != null) ? userNameWithoutChange : username; if (!sysRegistry.resourceExists(resourcePath)) { if (log.isDebugEnabled()) { log.debug("Resource does not exist in the path : " + resourcePath + " this can happen if this is in the " + "middle of the new " + identifierType + " creation, hence not checking the access control"); } return; } Resource resource = sysRegistry.get(resourcePath); if (resource == null) { return; } String accessControlProperty = resource.getProperty(APIConstants.ACCESS_CONTROL); if (accessControlProperty == null || accessControlProperty.trim().isEmpty() || accessControlProperty .equalsIgnoreCase(APIConstants.NO_ACCESS_CONTROL)) { if (log.isDebugEnabled()) { log.debug(identifierType + " in the path " + resourcePath + " does not have any access control restriction"); } return; } if (APIUtil.hasPermission(userNameWithTenantDomain, APIConstants.Permissions.APIM_ADMIN)) { return; } String publisherAccessControlRoles = resource.getProperty(APIConstants.DISPLAY_PUBLISHER_ROLES); if (publisherAccessControlRoles != null && !publisherAccessControlRoles.trim().isEmpty()) { String[] accessControlRoleList = publisherAccessControlRoles.replaceAll("\\s+", "").split(","); 
if (log.isDebugEnabled()) { log.debug(identifierType + " has restricted access to creators and publishers with the roles : " + Arrays .toString(accessControlRoleList)); } String[] userRoleList = APIUtil.getListOfRoles(userNameWithTenantDomain); if (log.isDebugEnabled()) { log.debug("User " + username + " has roles " + Arrays.toString(userRoleList)); } for (String role : accessControlRoleList) { if (!role.equalsIgnoreCase(APIConstants.NULL_USER_ROLE_LIST) && APIUtil .compareRoleList(userRoleList, role)) { return; } } if (log.isDebugEnabled()) { log.debug(identifierType + " " + identifier + " cannot be accessed by user '" + username + "'. It " + "has a publisher access control restriction"); } throw new APIManagementException( APIConstants.UN_AUTHORIZED_ERROR_MESSAGE + " view or modify the " + identifierType + " " + identifier); } } catch (RegistryException e) { throw new APIManagementException( "Registry Exception while trying to check the access control restriction of " + identifierType + " " + identifier .getName(), e); } } @Override public Map<API, List<APIProductResource>> addAPIProductWithoutPublishingToGateway(APIProduct product) throws APIManagementException { Map<API, List<APIProductResource>> apiToProductResourceMapping = new HashMap<>(); validateApiProductInfo(product); String tenantDomain = MultitenantUtils .getTenantDomain(APIUtil.replaceEmailDomainBack(product.getId().getProviderName())); if (log.isDebugEnabled()) { log.debug("API Product details successfully added to the registry. API Product Name: " + product.getId().getName() + ", API Product Version : " + product.getId().getVersion() + ", API Product context : " + "change"); //todo: log context } List<APIProductResource> resources = product.getProductResources(); // list to hold resources which are actually in an existing api. 
If user has created an API product with invalid // API or invalid resource of a valid API, that content will be removed .validResources array will have only // legitimate apis List<APIProductResource> validResources = new ArrayList<APIProductResource>(); for (APIProductResource apiProductResource : resources) { API api; String apiUUID; if (apiProductResource.getProductIdentifier() != null) { APIIdentifier productAPIIdentifier = apiProductResource.getApiIdentifier(); String emailReplacedAPIProviderName = APIUtil.replaceEmailDomain(productAPIIdentifier.getProviderName()); APIIdentifier emailReplacedAPIIdentifier = new APIIdentifier(emailReplacedAPIProviderName, productAPIIdentifier.getApiName(), productAPIIdentifier.getVersion()); apiUUID = apiMgtDAO.getUUIDFromIdentifier(emailReplacedAPIIdentifier, product.getOrganization()); api = getAPIbyUUID(apiUUID, product.getOrganization()); } else { apiUUID = apiProductResource.getApiId(); api = getAPIbyUUID(apiUUID, product.getOrganization()); // if API does not exist, getLightweightAPIByUUID() method throws exception. 
} if (api != null) { validateApiLifeCycleForApiProducts(api); if (api.getSwaggerDefinition() != null) { api.setSwaggerDefinition(getOpenAPIDefinition(apiUUID, product.getOrganization())); } if (!apiToProductResourceMapping.containsKey(api)) { apiToProductResourceMapping.put(api, new ArrayList<>()); } List<APIProductResource> apiProductResources = apiToProductResourceMapping.get(api); apiProductResources.add(apiProductResource); apiProductResource.setApiIdentifier(api.getId()); apiProductResource.setProductIdentifier(product.getId()); apiProductResource.setEndpointConfig(api.getEndpointConfig()); apiProductResource.setEndpointSecurityMap(APIUtil.setEndpointSecurityForAPIProduct(api)); URITemplate uriTemplate = apiProductResource.getUriTemplate(); Map<String, URITemplate> templateMap = apiMgtDAO.getURITemplatesForAPI(api); if (uriTemplate == null) { //if no resources are define for the API, we ingore that api for the product } else { String key = uriTemplate.getHTTPVerb() + ":" + uriTemplate.getResourceURI(); if (templateMap.containsKey(key)) { //Since the template ID is not set from the request, we manually set it. uriTemplate.setId(templateMap.get(key).getId()); //request has a valid API id and a valid resource. we add it to valid resource map validResources.add(apiProductResource); } else { //ignore log.warn("API with id " + apiProductResource.getApiId() + " does not have a resource " + uriTemplate.getResourceURI() + " with http method " + uriTemplate.getHTTPVerb()); } } } } //set the valid resources only product.setProductResources(validResources); //now we have validated APIs and it's resources inside the API product. 
Add it to database // Create registry artifact String apiProductUUID = createAPIProduct(product); product.setUuid(apiProductUUID); // Add to database apiMgtDAO.addAPIProduct(product, product.getOrganization()); return apiToProductResourceMapping; } @Override public void saveToGateway(APIProduct product) throws APIManagementException { List<APIProductResource> productResources = product.getProductResources(); //Only publish to gateways if the state is in Published state and has atleast one resource } public void deleteAPIProduct(APIProduct apiProduct) throws APIManagementException { APIProductIdentifier identifier = apiProduct.getId(); try { //int apiId = apiMgtDAO.getAPIID(identifier, null); long subsCount = apiMgtDAO.getAPISubscriptionCountByAPI(identifier); if (subsCount > 0) { //Logging as a WARN since this isn't an error scenario. String message = "Cannot remove the API Product as active subscriptions exist."; log.warn(message); throw new APIManagementException(message); } // gatewayType check is required when API Management is deployed on // other servers to avoid synapse deleteAPIProductRevisions(apiProduct.getUuid(), apiProduct.getOrganization()); apiPersistenceInstance.deleteAPIProduct(new Organization(apiProduct.getOrganization()), apiProduct.getUuid()); apiMgtDAO.deleteAPIProduct(identifier); if (log.isDebugEnabled()) { String logMessage = "API Product Name: " + identifier.getName() + ", API Product Version " + identifier.getVersion() + " successfully removed from the database."; log.debug(logMessage); } JSONObject apiLogObject = new JSONObject(); apiLogObject.put(APIConstants.AuditLogConstants.NAME, identifier.getName()); apiLogObject.put(APIConstants.AuditLogConstants.VERSION, identifier.getVersion()); apiLogObject.put(APIConstants.AuditLogConstants.PROVIDER, identifier.getProviderName()); APIUtil.logAuditMessage(APIConstants.AuditLogConstants.API_PRODUCT, apiLogObject.toString(), APIConstants.AuditLogConstants.DELETED, this.username); 
GatewayArtifactsMgtDAO.getInstance().deleteGatewayArtifacts(apiProduct.getUuid()); } catch (APIPersistenceException e) { handleException("Failed to remove the API product", e); } } @Override public void deleteAPIProduct(APIProductIdentifier identifier, String apiProductUUID, String organization) throws APIManagementException { if (StringUtils.isEmpty(apiProductUUID)) { if (identifier.getUUID() != null) { apiProductUUID = identifier.getUUID(); } else { apiProductUUID = apiMgtDAO.getUUIDFromIdentifier(identifier, organization); } } APIProduct apiProduct = getAPIProductbyUUID(apiProductUUID, organization); apiProduct.setOrganization(organization); deleteAPIProduct(apiProduct); } @Override public Map<API, List<APIProductResource>> updateAPIProduct(APIProduct product) throws APIManagementException, FaultGatewaysException { Map<API, List<APIProductResource>> apiToProductResourceMapping = new HashMap<>(); //validate resources and set api identifiers and resource ids to product List<APIProductResource> resources = product.getProductResources(); for (APIProductResource apiProductResource : resources) { API api; APIProductIdentifier productIdentifier = apiProductResource.getProductIdentifier(); String apiUUID; if (productIdentifier != null) { APIIdentifier productAPIIdentifier = apiProductResource.getApiIdentifier(); String emailReplacedAPIProviderName = APIUtil.replaceEmailDomain(productAPIIdentifier.getProviderName()); APIIdentifier emailReplacedAPIIdentifier = new APIIdentifier(emailReplacedAPIProviderName, productAPIIdentifier.getApiName(), productAPIIdentifier.getVersion()); apiUUID = apiMgtDAO.getUUIDFromIdentifier(emailReplacedAPIIdentifier, product.getOrganization()); api = getAPIbyUUID(apiUUID, tenantDomain); } else { apiUUID = apiProductResource.getApiId(); api = getAPIbyUUID(apiUUID, tenantDomain); } if (api.getSwaggerDefinition() != null) { api.setSwaggerDefinition(getOpenAPIDefinition(apiUUID, tenantDomain)); } if (!apiToProductResourceMapping.containsKey(api)) 
{ apiToProductResourceMapping.put(api, new ArrayList<>()); } List<APIProductResource> apiProductResources = apiToProductResourceMapping.get(api); apiProductResources.add(apiProductResource); // if API does not exist, getLightweightAPIByUUID() method throws exception. so no need to handle NULL apiProductResource.setApiIdentifier(api.getId()); apiProductResource.setProductIdentifier(product.getId()); apiProductResource.setEndpointConfig(api.getEndpointConfig()); apiProductResource.setEndpointSecurityMap(APIUtil.setEndpointSecurityForAPIProduct(api)); URITemplate uriTemplate = apiProductResource.getUriTemplate(); Map<String, URITemplate> templateMap = apiMgtDAO.getURITemplatesForAPI(api); if (uriTemplate == null) { // TODO handle if no resource is defined. either throw an error or add all the resources of that API // to the product } else { String key = uriTemplate.getHTTPVerb() + ":" + uriTemplate.getUriTemplate(); if (templateMap.containsKey(key)) { //Since the template ID is not set from the request, we manually set it. 
uriTemplate.setId(templateMap.get(key).getId()); } else { throw new APIManagementException("API with id " + apiProductResource.getApiId() + " does not have a resource " + uriTemplate.getUriTemplate() + " with http method " + uriTemplate.getHTTPVerb()); } } } APIProduct oldApi = getAPIProductbyUUID(product.getUuid(), CarbonContext.getThreadLocalCarbonContext().getTenantDomain()); Gson gson = new Gson(); Map<String, String> oldMonetizationProperties = gson.fromJson(oldApi.getMonetizationProperties().toString(), HashMap.class); if (oldMonetizationProperties != null && !oldMonetizationProperties.isEmpty()) { Map<String, String> newMonetizationProperties = gson.fromJson(product.getMonetizationProperties().toString(), HashMap.class); if (newMonetizationProperties != null) { for (Map.Entry<String, String> entry : oldMonetizationProperties.entrySet()) { String newValue = newMonetizationProperties.get(entry.getKey()); if (StringUtils.isAllBlank(newValue)) { newMonetizationProperties.put(entry.getKey(), entry.getValue()); } } JSONParser parser = new JSONParser(); try { JSONObject jsonObj = (JSONObject) parser.parse(gson.toJson(newMonetizationProperties)); product.setMonetizationProperties(jsonObj); } catch (ParseException e) { throw new APIManagementException("Error when parsing monetization properties ", e); } } } invalidateResourceCache(product.getContext(), product.getId().getVersion(), Collections.EMPTY_SET); //todo : check whether permissions need to be updated and pass it along updateApiProductArtifact(product, true, true); apiMgtDAO.updateAPIProduct(product, userNameWithoutChange); int productId = apiMgtDAO.getAPIProductId(product.getId()); APIEvent apiEvent = new APIEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.API_UPDATE.name(), tenantId, tenantDomain, product.getId().getName(), productId, product.getId().getUUID(), product.getId().getVersion(), product.getType(), product.getContext(), product.getId().getProviderName(), 
APIConstants.LC_PUBLISH_LC_STATE); APIUtil.sendNotification(apiEvent, APIConstants.NotifierType.API.name()); return apiToProductResourceMapping; } @Override public List<ResourcePath> getResourcePathsOfAPI(APIIdentifier apiId) throws APIManagementException { return apiMgtDAO.getResourcePathsOfAPI(apiId); } private void validateApiLifeCycleForApiProducts(API api) throws APIManagementException { String status = api.getStatus(); if (APIConstants.BLOCKED.equals(status) || APIConstants.PROTOTYPED.equals(status) || APIConstants.DEPRECATED.equals(status) || APIConstants.RETIRED.equals(status)) { throw new APIManagementException("Cannot create API Product using API with following status: " + status, ExceptionCodes.from(ExceptionCodes.API_PRODUCT_WITH_UNSUPPORTED_LIFECYCLE_API, status)); } } /** * Validates the name of api product against illegal characters. * * @param product APIProduct info object * @throws APIManagementException */ private void validateApiProductInfo(APIProduct product) throws APIManagementException { String apiName = product.getId().getName(); if (apiName == null) { handleException("API Name is required."); } else if (containsIllegals(apiName)) { handleException("API Name contains one or more illegal characters " + "( " + APIConstants.REGEX_ILLEGAL_CHARACTERS_FOR_API_METADATA + " )"); } //version is not a mandatory field for now if (!hasValidLength(apiName, APIConstants.MAX_LENGTH_API_NAME) || !hasValidLength(product.getId().getVersion(), APIConstants.MAX_LENGTH_VERSION) || !hasValidLength(product.getId().getProviderName(), APIConstants.MAX_LENGTH_PROVIDER) || !hasValidLength(product.getContext(), APIConstants.MAX_LENGTH_CONTEXT)) { throw new APIManagementException("Character length exceeds the allowable limit", ExceptionCodes.LENGTH_EXCEEDS); } } /** * Create an Api Product * * @param apiProduct API Product * @throws APIManagementException if failed to create APIProduct */ protected String createAPIProduct(APIProduct apiProduct) throws 
APIManagementException { String apiProductUUID = null; // Validate Transports and Security validateAndSetTransports(apiProduct); validateAndSetAPISecurity(apiProduct); PublisherAPIProduct publisherAPIProduct = APIProductMapper.INSTANCE.toPublisherApiProduct(apiProduct); PublisherAPIProduct addedAPIProduct; try { publisherAPIProduct.setApiProductName(apiProduct.getId().getName()); publisherAPIProduct.setProviderName(apiProduct.getId().getProviderName()); publisherAPIProduct.setVersion(apiProduct.getId().getVersion()); addedAPIProduct = apiPersistenceInstance.addAPIProduct( new Organization(CarbonContext.getThreadLocalCarbonContext().getTenantDomain()), publisherAPIProduct); apiProductUUID = addedAPIProduct.getId(); } catch (APIPersistenceException e) { throw new APIManagementException("Error while creating API product ", e); } return apiProductUUID; } private void changeLifeCycleStatusToPublish(APIProductIdentifier apiIdentifier) throws APIManagementException { try { PrivilegedCarbonContext.startTenantFlow(); PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(this.username); PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(this.tenantDomain, true); String productArtifactId = registry.get(APIUtil.getAPIProductPath(apiIdentifier)).getUUID(); GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry, APIConstants.API_KEY); GenericArtifact apiArtifact = artifactManager.getGenericArtifact(productArtifactId); if (apiArtifact != null) { apiArtifact.invokeAction("Publish", APIConstants.API_LIFE_CYCLE); if (log.isDebugEnabled()) { String logMessage = "API Product Status changed successfully. 
API Product Name: " + apiIdentifier.getName(); log.debug(logMessage); } } } catch (RegistryException e) { throw new APIManagementException("Error while Changing Lifecycle status of API Product " + apiIdentifier.getName(), e); } finally { PrivilegedCarbonContext.endTenantFlow(); } } /** * Update API Product Artifact in Registry * * @param apiProduct * @param updateMetadata * @param updatePermissions * @throws APIManagementException */ private void updateApiProductArtifact(APIProduct apiProduct, boolean updateMetadata, boolean updatePermissions) throws APIManagementException { //Validate Transports and Security validateAndSetTransports(apiProduct); validateAndSetAPISecurity(apiProduct); PublisherAPIProduct publisherAPIProduct = APIProductMapper.INSTANCE.toPublisherApiProduct(apiProduct); PublisherAPIProduct addedAPIProduct; try { publisherAPIProduct.setApiProductName(apiProduct.getId().getName()); publisherAPIProduct.setProviderName(apiProduct.getId().getProviderName()); publisherAPIProduct.setVersion(apiProduct.getId().getVersion()); addedAPIProduct = apiPersistenceInstance.updateAPIProduct( new Organization(CarbonContext.getThreadLocalCarbonContext().getTenantDomain()), publisherAPIProduct); } catch (APIPersistenceException e) { throw new APIManagementException("Error while creating API product "); } } public void updateProductResourceMappings(API api, String organization, List<APIProductResource> productResources) throws APIManagementException { //get uri templates of API again Map<String, URITemplate> apiResources = apiMgtDAO.getURITemplatesForAPI(api); for (APIProductResource productResource : productResources) { URITemplate uriTemplate = productResource.getUriTemplate(); String productResourceKey = uriTemplate.getHTTPVerb() + ":" + uriTemplate.getUriTemplate(); //set new uri template ID to the product resource int updatedURITemplateId = apiResources.get(productResourceKey).getId(); uriTemplate.setId(updatedURITemplateId); } 
apiMgtDAO.addAPIProductResourceMappings(productResources, organization, null);
    }

    /**
     * Create a product documentation
     *
     * @param product       APIProduct the document is attached to
     * @param documentation Documentation to create; its ID is set from the created registry artifact
     * @throws APIManagementException if failed to add documentation
     */
    private void createDocumentation(APIProduct product, Documentation documentation) throws APIManagementException {
        try {
            APIProductIdentifier productId = product.getId();
            GenericArtifactManager artifactManager = new GenericArtifactManager(registry,
                    APIConstants.DOCUMENTATION_KEY);
            GenericArtifact artifact = artifactManager.newGovernanceArtifact(new QName(documentation.getName()));
            artifactManager.addGenericArtifact(APIUtil.createDocArtifactContent(artifact, productId, documentation));
            String productPath = APIUtil.getAPIProductPath(productId);
            //Adding association from api to documentation . (API Product -----> doc)
            registry.addAssociation(productPath, artifact.getPath(), APIConstants.DOCUMENTATION_ASSOCIATION);
            String docVisibility = documentation.getVisibility().name();
            String[] authorizedRoles = getAuthorizedRoles(productPath);
            String visibility = product.getVisibility();
            // A shared/owner document visibility overrides the product-level visibility and
            // clears the role restriction (null roles) for the doc resources.
            if (docVisibility != null) {
                if (APIConstants.DOC_SHARED_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_SHARED_VISIBILITY;
                } else if (APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_OWNER_VISIBILITY;
                }
            }
            APIUtil.setResourcePermissions(product.getId().getProviderName(),visibility, authorizedRoles, artifact
                    .getPath(), registry);
            String docFilePath = artifact.getAttribute(APIConstants.DOC_FILE_PATH);
            if (docFilePath != null && !StringUtils.EMPTY.equals(docFilePath)) {
                //The docFilePatch comes as /t/tenanatdoman/registry/resource/_system/governance/apimgt/applicationdata..
                //We need to remove the /t/tenanatdoman/registry/resource/_system/governance section to set permissions.
                int startIndex = docFilePath.indexOf(APIConstants.GOVERNANCE) + (APIConstants.GOVERNANCE).length();
                String filePath = docFilePath.substring(startIndex, docFilePath.length());
                APIUtil.setResourcePermissions(product.getId().getProviderName(),visibility, authorizedRoles, filePath,
                        registry);
                registry.addAssociation(artifact.getPath(), filePath, APIConstants.DOCUMENTATION_FILE_ASSOCIATION);
            }
            documentation.setId(artifact.getId());
        } catch (RegistryException e) {
            handleException("Failed to add documentation", e);
        } catch (UserStoreException e) {
            handleException("Failed to add documentation", e);
        }
    }

    /**
     * Updates a given api product documentation
     *
     * @param productId     APIProductIdentifier of the owning product
     * @param documentation Documentation with the updated content/metadata
     * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to update docs
     */
    public void updateDocumentation(APIProductIdentifier productId, Documentation documentation)
            throws APIManagementException {

        String productPath = APIUtil.getAPIProductPath(productId);
        APIProduct product = getAPIProduct(productPath);
        String docPath = APIUtil.getProductDocPath(productId) + documentation.getName();
        try {
            String docArtifactId = registry.get(docPath).getUUID();
            GenericArtifactManager artifactManager = APIUtil.getArtifactManager(registry,
                    APIConstants.DOCUMENTATION_KEY);
            GenericArtifact artifact = artifactManager.getGenericArtifact(docArtifactId);
            String docVisibility = documentation.getVisibility().name();
            String[] authorizedRoles = new String[0];
            String visibleRolesList = product.getVisibleRoles();
            if (visibleRolesList != null) {
                authorizedRoles = visibleRolesList.split(",");
            }
            String visibility = product.getVisibility();
            // Same visibility-override rule as createDocumentation: shared/owner docs drop
            // the role restriction inherited from the product.
            if (docVisibility != null) {
                if (APIConstants.DOC_SHARED_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_SHARED_VISIBILITY;
                } else if (APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_OWNER_VISIBILITY;
                }
            }
            GenericArtifact updateDocArtifact = APIUtil.createDocArtifactContent(artifact, productId, documentation);
            artifactManager.updateGenericArtifact(updateDocArtifact);
            // Clear old permissions before re-applying, so revoked roles do not linger.
            APIUtil.clearResourcePermissions(docPath, productId, ((UserRegistry) registry).getTenantId());
            APIUtil.setResourcePermissions(product.getId().getProviderName(), visibility, authorizedRoles,
                    artifact.getPath(), registry);
            String docFilePath = artifact.getAttribute(APIConstants.DOC_FILE_PATH);
            if (docFilePath != null && !"".equals(docFilePath)) {
                // The docFilePatch comes as
                // /t/tenanatdoman/registry/resource/_system/governance/apimgt/applicationdata..
                // We need to remove the
                // /t/tenanatdoman/registry/resource/_system/governance section
                // to set permissions.
                int startIndex = docFilePath.indexOf(APIConstants.GOVERNANCE) + (APIConstants.GOVERNANCE).length();
                String filePath = docFilePath.substring(startIndex, docFilePath.length());
                APIUtil.setResourcePermissions(product.getId().getProviderName(), visibility, authorizedRoles,
                        filePath, registry);
            }
        } catch (RegistryException e) {
            handleException("Failed to update documentation", e);
        }
    }

    /**
     * Add a file to a product document of source type FILE
     *
     * @param productId     APIProduct identifier the document belongs to
     * @param documentation document
     * @param filename      name of the file
     * @param content       content of the file as an Input Stream
     * @param contentType   content type of the file (overwritten with force-download below)
     * @throws APIManagementException if failed to add the file, or the document's source type is not FILE
     */
    public void addFileToProductDocumentation(APIProductIdentifier productId, Documentation documentation,
            String filename, InputStream content, String contentType) throws APIManagementException {

        if (Documentation.DocumentSourceType.FILE.equals(documentation.getSourceType())) {
            // Force browsers to download rather than render the attachment.
            contentType = "application/force-download";
            ResourceFile icon = new ResourceFile(content, contentType);
            String filePath = APIUtil.getDocumentationFilePath(productId, filename);
            APIProduct apiProduct;
            try {
                apiProduct = getAPIProduct(productId);
                String visibleRolesList = apiProduct.getVisibleRoles();
                String[] visibleRoles = new String[0];
                if (visibleRolesList != null) {
                    visibleRoles = visibleRolesList.split(",");
                }
                APIUtil.setResourcePermissions(apiProduct.getId().getProviderName(), apiProduct.getVisibility(),
                        visibleRoles, filePath, registry);
                documentation.setFilePath(addResourceFile(productId, filePath, icon));
                APIUtil.setFilePermission(filePath);
            } catch (APIManagementException e) {
                handleException("Failed to add file to product document " + documentation.getName(), e);
            }
        } else {
            String errorMsg = "Cannot add file to the Product Document. Document " + documentation.getName()
                    + "'s Source type is not FILE.";
            handleException(errorMsg);
        }
    }

    /**
     * This method used to save the product documentation content
     *
     * @param apiProduct,        API Product
     * @param documentationName, name of the inline documentation
     * @param text,              content of the inline documentation
     * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to add the document as a resource to registry
     */
    public void addProductDocumentationContent(APIProduct apiProduct, String documentationName, String text)
            throws APIManagementException {

        APIProductIdentifier identifier = apiProduct.getId();
        String documentationPath = APIUtil.getProductDocPath(identifier) + documentationName;
        String contentPath = APIUtil.getProductDocPath(identifier) + APIConstants.INLINE_DOCUMENT_CONTENT_DIR
                + RegistryConstants.PATH_SEPARATOR + documentationName;
        boolean isTenantFlowStarted = false;
        try {
            // Switch into the product's tenant before touching the registry, unless super tenant.
            if (tenantDomain != null && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
                PrivilegedCarbonContext.startTenantFlow();
                isTenantFlowStarted = true;
                PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantDomain(tenantDomain, true);
            }
            Resource docResource = registry.get(documentationPath);
            GenericArtifactManager artifactManager = new GenericArtifactManager(registry,
                    APIConstants.DOCUMENTATION_KEY);
            GenericArtifact docArtifact = artifactManager.getGenericArtifact(docResource.getUUID());
            Documentation doc = APIUtil.getDocumentation(docArtifact);

            Resource docContent;
            if (!registry.resourceExists(contentPath)) {
                docContent = registry.newResource();
            } else {
                docContent = registry.get(contentPath);
            }
            /* This is a temporary fix for doc content replace issue. We need to add
             * separate methods to add inline content resource in document update */
            if (!APIConstants.NO_CONTENT_UPDATE.equals(text)) {
                docContent.setContent(text);
            }
            docContent.setMediaType(APIConstants.DOCUMENTATION_INLINE_CONTENT_TYPE);
            registry.put(contentPath, docContent);
            registry.addAssociation(documentationPath, contentPath, APIConstants.DOCUMENTATION_CONTENT_ASSOCIATION);
            String productPath = APIUtil.getAPIProductPath(identifier);
            String[] authorizedRoles = getAuthorizedRoles(productPath);
            String docVisibility = doc.getVisibility().name();
            String visibility = apiProduct.getVisibility();
            // Same visibility-override rule as createDocumentation.
            if (docVisibility != null) {
                if (APIConstants.DOC_SHARED_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_SHARED_VISIBILITY;
                } else if (APIConstants.DOC_OWNER_VISIBILITY.equalsIgnoreCase(docVisibility)) {
                    authorizedRoles = null;
                    visibility = APIConstants.DOC_OWNER_VISIBILITY;
                }
            }
            APIUtil.setResourcePermissions(apiProduct.getId().getProviderName(),visibility, authorizedRoles,
                    contentPath, registry);
        } catch (RegistryException e) {
            String msg = "Failed to add the documentation content of : " + documentationName + " of API Product :"
                    + identifier.getName();
            handleException(msg, e);
        } catch (UserStoreException e) {
            String msg = "Failed to add the documentation content of : " + documentationName + " of API Product :"
                    + identifier.getName();
            handleException(msg, e);
        } finally {
            if (isTenantFlowStarted) {
                PrivilegedCarbonContext.endTenantFlow();
            }
        }
    }

    // Delegates to the registry-backed GraphQL schema lookup.
    @Override
    public String getGraphqlSchema(APIIdentifier apiId) throws APIManagementException {

        return getGraphqlSchemaDefinition(apiId);
    }

    /**
     * Check
 whether the given scope name exists as a shared scope in the tenant domain.
     *
     * @param scopeName Shared Scope name
     * @param tenantId  Tenant Id
     * @return Scope availability
     * @throws APIManagementException if failed to check the availability
     */
    @Override
    public boolean isSharedScopeNameExists(String scopeName, int tenantId) throws APIManagementException {

        if (log.isDebugEnabled()) {
            log.debug("Checking whether scope name: " + scopeName + " exists as a shared scope in tenant with ID: "
                    + tenantId);
        }
        return ApiMgtDAO.getInstance().isSharedScopeExists(scopeName, tenantId);
    }

    /**
     * Add Shared Scope by registering it in the KM and adding the scope as a Shared Scope in AM DB.
     *
     * @param scope        Shared Scope
     * @param tenantDomain Tenant domain
     * @return UUId of the added Shared Scope object
     * @throws APIManagementException if failed to add a scope
     */
    @Override
    public String addSharedScope(Scope scope, String tenantDomain) throws APIManagementException {

        Set<Scope> scopeSet = new HashSet<>();
        scopeSet.add(scope);
        int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain);
        addScopes(scopeSet, tenantId);
        Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain);
        for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) {
            KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager();
            if (keyManager != null) {
                try {
                    keyManager.registerScope(scope);
                } catch (APIManagementException e) {
                    // Best-effort: a failing Key Manager is logged and skipped so one faulty KM
                    // does not block registering the scope with the rest.
                    log.error("Error occurred while registering Scope in Key Manager "
                            + keyManagerDtoEntry.getKey(), e);
                }
            }
            if (log.isDebugEnabled()) {
                log.debug("Adding shared scope mapping: " + scope.getKey() + " to  Key Manager : "
                        + keyManagerDtoEntry.getKey());
            }
        }
        return ApiMgtDAO.getInstance().addSharedScope(scope, tenantDomain);
    }

    /**
     * Get all available shared scopes.
* * @param tenantDomain tenant domain * @return Shared Scope list * @throws APIManagementException if failed to get the scope list */ @Override public List<Scope> getAllSharedScopes(String tenantDomain) throws APIManagementException { if (log.isDebugEnabled()) { log.debug("Retrieving all the shared scopes for tenant: " + tenantDomain); } //Get all shared scopes List<Scope> allSharedScopes = ApiMgtDAO.getInstance().getAllSharedScopes(tenantDomain); //Get all scopes from KM List<Scope> allScopes = scopesDAO.getScopes(APIUtil.getTenantIdFromTenantDomain(tenantDomain)); for (Scope scope : allSharedScopes) { for (Scope tempScope : allScopes) { if (scope.getKey().equals(tempScope.getKey())) { scope.setName(tempScope.getName()); scope.setDescription(tempScope.getDescription()); scope.setRoles(tempScope.getRoles()); break; } } } return allSharedScopes; } /** * Get all available shared scope keys. * * @param tenantDomain tenant domain * @return Shared Scope Keyset * @throws APIManagementException if failed to get the scope key set */ @Override public Set<String> getAllSharedScopeKeys(String tenantDomain) throws APIManagementException { //Get all shared scope keys return ApiMgtDAO.getInstance().getAllSharedScopeKeys(tenantDomain); } /** * Get shared scope by UUID. 
 *
     * @param sharedScopeId Shared scope Id
     * @param tenantDomain  tenant domain
     * @return Shared Scope
     * @throws APIManagementException If failed to get the scope
     */
    @Override
    public Scope getSharedScopeByUUID(String sharedScopeId, String tenantDomain) throws APIManagementException {

        Scope sharedScope;
        if (log.isDebugEnabled()) {
            log.debug("Retrieving shared scope: " + sharedScopeId);
        }
        // UUID -> scope key mapping lives in the AM DB; the scope body lives in scopesDAO.
        String scopeKey = ApiMgtDAO.getInstance().getSharedScopeKeyByUUID(sharedScopeId);
        if (scopeKey != null) {
            sharedScope = scopesDAO.getScope(scopeKey, APIUtil.getTenantIdFromTenantDomain(tenantDomain));
            sharedScope.setId(sharedScopeId);
        } else {
            throw new APIMgtResourceNotFoundException("Shared Scope not found for scope ID: " + sharedScopeId,
                    ExceptionCodes.from(ExceptionCodes.SHARED_SCOPE_NOT_FOUND, sharedScopeId));
        }
        return sharedScope;
    }

    /**
     * Delete shared scope.
     *
     * @param scopeName    Shared scope name
     * @param tenantDomain tenant domain
     * @throws APIManagementException If failed to delete the scope
     */
    @Override
    public void deleteSharedScope(String scopeName, String tenantDomain) throws APIManagementException {

        if (log.isDebugEnabled()) {
            log.debug("Deleting shared scope " + scopeName);
        }
        Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain);
        for (Map.Entry<String, KeyManagerDto> keyManagerEntry : tenantKeyManagers.entrySet()) {
            KeyManager keyManager = keyManagerEntry.getValue().getKeyManager();
            if (keyManager != null) {
                try {
                    keyManager.deleteScope(scopeName);
                } catch (APIManagementException e) {
                    // Best-effort per Key Manager: log and continue so local deletion still happens.
                    log.error("Error while Deleting Shared Scope " + scopeName + " from Key Manager "
                            + keyManagerEntry.getKey(), e);
                }
            }
        }
        apiMgtDAO.deleteSharedScope(scopeName, tenantDomain);
        deleteScope(scopeName, APIUtil.getTenantIdFromTenantDomain(tenantDomain));
    }

    /**
     * Update a shared scope.
     *
     * @param sharedScope  Shared Scope
     * @param tenantDomain tenant domain
     * @throws APIManagementException If failed to update
     */
    @Override
    public void updateSharedScope(Scope sharedScope, String tenantDomain) throws APIManagementException {

        int tenantId = APIUtil.getTenantIdFromTenantDomain(tenantDomain);
        Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain);
        for (Map.Entry<String, KeyManagerDto> keyManagerEntry : tenantKeyManagers.entrySet()) {
            KeyManager keyManager = keyManagerEntry.getValue().getKeyManager();
            if (keyManager != null) {
                try {
                    keyManager.updateScope(sharedScope);
                } catch (APIManagementException e) {
                    // Best-effort per Key Manager: log and continue so the local update still happens.
                    log.error("Error while Updating Shared Scope " + sharedScope.getKey() + " from Key Manager "
                            + keyManagerEntry.getKey(), e);
                }
            }
        }
        updateScope(sharedScope, tenantId);
    }

    /**
     * Validate a shared scopes set. Add the additional attributes (scope description, bindings etc).
     *
     * @param scopes Shared scopes set
     * @throws APIManagementException If failed to validate
     */
    @Override
    public void validateSharedScopes(Set<Scope> scopes, String tenantDomain) throws APIManagementException {

        Map<String, KeyManagerDto> tenantKeyManagers = KeyManagerHolder.getTenantKeyManagers(tenantDomain);
        for (Map.Entry<String, KeyManagerDto> keyManagerDtoEntry : tenantKeyManagers.entrySet()) {
            KeyManager keyManager = keyManagerDtoEntry.getValue().getKeyManager();
            if (keyManager != null) {
                keyManager.validateScopes(scopes);
            }
        }
    }

    /**
     * Get the API and URI usages of the given shared scope
     *
     * @param uuid     UUID of the shared scope
     * @param tenantId ID of the Tenant domain
     * @throws APIManagementException If failed to validate
     */
    @Override
    public SharedScopeUsage getSharedScopeUsage(String uuid, int tenantId) throws APIManagementException {
        return ApiMgtDAO.getInstance().getSharedScopeUsage(uuid, tenantId);
    }

    /**
     * This method returns the security audit properties
     *
     * @param userId user id
     * @return JSONObject security audit properties
     * @throws APIManagementException
     */
    public JSONObject getSecurityAuditAttributesFromConfig(String userId) throws APIManagementException {

        String tenantDomain = MultitenantUtils.getTenantDomain(userId);
        JSONObject securityAuditConfig = APIUtil.getSecurityAuditAttributesFromRegistry(tenantDomain);
        if (securityAuditConfig != null) {
            // Tenant-level registry config only wins when it explicitly sets overrideGlobal=true
            // AND provides both the token and the collection id; otherwise fall back to the
            // global server configuration.
            if ((securityAuditConfig.get(APIConstants.SECURITY_AUDIT_OVERRIDE_GLOBAL) != null)
                    && securityAuditConfig.get(APIConstants.SECURITY_AUDIT_OVERRIDE_GLOBAL) instanceof Boolean
                    && (Boolean) securityAuditConfig.get(APIConstants.SECURITY_AUDIT_OVERRIDE_GLOBAL)) {
                String apiToken = (String) securityAuditConfig.get(APIConstants.SECURITY_AUDIT_API_TOKEN);
                String collectionId = (String) securityAuditConfig.get(APIConstants.SECURITY_AUDIT_COLLECTION_ID);
                JSONObject tenantProperties = new JSONObject();
                if (StringUtils.isNotEmpty(apiToken) && StringUtils.isNotEmpty(collectionId)) {
                    tenantProperties.put(APIConstants.SECURITY_AUDIT_API_TOKEN, apiToken);
                    tenantProperties.put(APIConstants.SECURITY_AUDIT_COLLECTION_ID, collectionId);
                    return tenantProperties;
                }
            } else {
                return getSecurityAuditConfigurationProperties(tenantDomain);
            }
        } else {
            return getSecurityAuditConfigurationProperties(tenantDomain);
        }
        // Reached when override is enabled but token/collection are missing.
        return null;
    }

    @Override
    public void saveAsyncApiDefinition(API api, String jsonText) throws APIManagementException {

        String apiId;
        String organization = api.getOrganization();
        // Resolve the API UUID: prefer the object's UUID, then the identifier's, then a DB lookup.
        if (api.getUuid() != null) {
            apiId = api.getUuid();
        } else if (api.getId().getUUID() != null) {
            apiId = api.getId().getUUID();
        } else {
            apiId = apiMgtDAO.getUUIDFromIdentifier(api.getId().getProviderName(), api.getId().getApiName(),
                    api.getId().getVersion(), organization);
        }
        try {
            apiPersistenceInstance.saveAsyncDefinition(new Organization(organization), apiId, jsonText);
        } catch (AsyncSpecPersistenceException e) {
            throw new APIManagementException("Error while persisting Async API definition ", e);
        }
    }

    /**
     * This method returns security audit properties from the API Manager Configuration
     *
     * @param tenantDomain tenant domain name
     * @return JSONObject security audit properties
     */
    private JSONObject getSecurityAuditConfigurationProperties(String tenantDomain) {

        APIManagerConfiguration configuration = ServiceReferenceHolder.getInstance()
                .getAPIManagerConfigurationService().getAPIManagerConfiguration();
        String apiToken = configuration.getFirstProperty(APIConstants.API_SECURITY_AUDIT_API_TOKEN);
        String collectionId = configuration.getFirstProperty(APIConstants.API_SECURITY_AUDIT_CID);
        String baseUrl = configuration.getFirstProperty(APIConstants.API_SECURITY_AUDIT_BASE_URL);
        boolean isGlobal = Boolean.parseBoolean(configuration.getFirstProperty(APIConstants.API_SECURITY_AUDIT_GLOBAL));
        JSONObject configProperties = new JSONObject();

        if (StringUtils.isNotEmpty(apiToken) && StringUtils.isNotEmpty(collectionId)) {
            configProperties.put(APIConstants.SECURITY_AUDIT_API_TOKEN, apiToken);
            configProperties.put(APIConstants.SECURITY_AUDIT_COLLECTION_ID, collectionId);
            configProperties.put(APIConstants.SECURITY_AUDIT_BASE_URL, baseUrl);
            // Global config applies to all tenants only when flagged global; otherwise only
            // the super tenant may use it.
            if (isGlobal || "carbon.super".equals(tenantDomain)) {
                return configProperties;
            } else {
                return null;
            }
        }
        return null;
    }

    /**
     * Computes which resources of the existing API are used by API Products but are missing
     * from the updated URI template set (i.e. would break the products if removed).
     */
    @Override
    public List<APIResource> getRemovedProductResources(Set<URITemplate> updatedUriTemplates, API existingAPI) {

        Set<URITemplate> existingUriTemplates = existingAPI.getUriTemplates();
        List<APIResource> removedReusedResources = new ArrayList<>();

        for (URITemplate existingUriTemplate : existingUriTemplates) {
            // If existing URITemplate is used by any API Products
            if (!existingUriTemplate.retrieveUsedByProducts().isEmpty()) {
                String existingVerb = existingUriTemplate.getHTTPVerb();
                String existingPath = existingUriTemplate.getUriTemplate();
                boolean isReusedResourceRemoved = true;

                for (URITemplate updatedUriTemplate : updatedUriTemplates) {
                    String updatedVerb = updatedUriTemplate.getHTTPVerb();
                    String updatedPath = updatedUriTemplate.getUriTemplate();

                    //Check if existing reused resource is among updated resources
if (existingVerb.equalsIgnoreCase(updatedVerb) && existingPath.equalsIgnoreCase(updatedPath)) { isReusedResourceRemoved = false; break; } } // Existing reused resource is not among updated resources if (isReusedResourceRemoved) { APIResource removedResource = new APIResource(existingVerb, existingPath); removedReusedResources.add(removedResource); } } } return removedReusedResources; } private void addScopes(Set<Scope> scopes, int tenantId) throws APIManagementException { if (scopes != null) { scopesDAO.addScopes(scopes, tenantId); for (Scope scope : scopes) { ScopeEvent scopeEvent = new ScopeEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.SCOPE_CREATE.name(), tenantId, tenantDomain, scope.getKey(), scope.getName(), scope.getDescription()); if (StringUtils.isNotEmpty(scope.getRoles()) && scope.getRoles().trim().length() > 0) { scopeEvent.setRoles(Arrays.asList(scope.getRoles().split(","))); } APIUtil.sendNotification(scopeEvent, APIConstants.NotifierType.SCOPE.name()); } } } private void updateScope(Scope scope, int tenantId) throws APIManagementException { if (scope != null) { scopesDAO.updateScope(scope, tenantId); ScopeEvent scopeEvent = new ScopeEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.SCOPE_UPDATE.name(), tenantId, tenantDomain, scope.getKey(), scope.getName(), scope.getDescription()); if (StringUtils.isNotEmpty(scope.getRoles()) && scope.getRoles().trim().length() > 0) { scopeEvent.setRoles(Arrays.asList(scope.getRoles().split(","))); } APIUtil.sendNotification(scopeEvent, APIConstants.NotifierType.SCOPE.name()); } } private void deleteScope(String scopeKey, int tenantId) throws APIManagementException { if (StringUtils.isNotEmpty(scopeKey)) { scopesDAO.deleteScope(scopeKey, tenantId); ScopeEvent scopeEvent = new ScopeEvent(UUID.randomUUID().toString(), System.currentTimeMillis(), APIConstants.EventType.SCOPE_DELETE.name(), tenantId, tenantDomain, scopeKey, null, null); 
APIUtil.sendNotification(scopeEvent, APIConstants.NotifierType.SCOPE.name()); } } private void deleteScopes(Set<String> scopes, int tenantId) throws APIManagementException { if (scopes != null) { for (String scope : scopes) { deleteScope(scope, tenantId); } } } @Override public API getAPIbyUUID(String uuid, String organization) throws APIManagementException { Organization org = new Organization(organization); try { PublisherAPI publisherAPI = apiPersistenceInstance.getPublisherAPI(org, uuid); if (publisherAPI != null) { API api = APIMapper.INSTANCE.toApi(publisherAPI); APIIdentifier apiIdentifier = api.getId(); apiIdentifier.setUuid(uuid); api.setId(apiIdentifier); checkAccessControlPermission(userNameWithoutChange, api.getAccessControl(), api.getAccessControlRoles()); /////////////////// Do processing on the data object////////// populateRevisionInformation(api, uuid); populateAPIInformation(uuid, organization, api); loadMediationPoliciesToAPI(api, organization); populateAPIStatus(api); populateDefaultVersion(api); return api; } else { String msg = "Failed to get API. 
API artifact corresponding to artifactId " + uuid + " does not exist"; throw new APIMgtResourceNotFoundException(msg); } } catch (APIPersistenceException e) { throw new APIManagementException("Failed to get API", e); } catch (OASPersistenceException e) { throw new APIManagementException("Error while retrieving the OAS definition", e); } catch (ParseException e) { throw new APIManagementException("Error while parsing the OAS definition", e); } catch (AsyncSpecPersistenceException e) { throw new APIManagementException("Error while retrieving the Async API definition", e); } } private void populateAPITier(APIProduct apiProduct) throws APIManagementException { if (apiProduct.isRevision()) { String apiLevelTier = apiMgtDAO.getAPILevelTier(apiProduct.getRevisionedApiProductId(), apiProduct.getUuid()); apiProduct.setProductLevelPolicy(apiLevelTier); } } private void populateRevisionInformation(API api, String revisionUUID) throws APIManagementException { APIRevision apiRevision = apiMgtDAO.checkAPIUUIDIsARevisionUUID(revisionUUID); if (apiRevision != null && !StringUtils.isEmpty(apiRevision.getApiUUID())) { api.setRevision(true); api.setRevisionedApiId(apiRevision.getApiUUID()); api.setRevisionId(apiRevision.getId()); } } private void populateRevisionInformation(APIProduct apiProduct, String revisionUUID) throws APIManagementException { APIRevision apiRevision = apiMgtDAO.checkAPIUUIDIsARevisionUUID(revisionUUID); if (apiRevision != null && !StringUtils.isEmpty(apiRevision.getApiUUID())) { apiProduct.setRevision(true); apiProduct.setRevisionedApiProductId(apiRevision.getApiUUID()); apiProduct.setRevisionId(apiRevision.getId()); } } private void populateAPIStatus(API api) throws APIManagementException { if (api.isRevision()) { api.setStatus(apiMgtDAO.getAPIStatusFromAPIUUID(api.getRevisionedApiId())); } else { api.setStatus(apiMgtDAO.getAPIStatusFromAPIUUID(api.getUuid())); } } private void populateAPIStatus(APIProduct apiProduct) throws APIManagementException { if 
(apiProduct.isRevision()) { apiProduct.setState(apiMgtDAO.getAPIStatusFromAPIUUID(apiProduct.getRevisionedApiProductId())); } else { apiProduct.setState(apiMgtDAO.getAPIStatusFromAPIUUID(apiProduct.getUuid())); } } public APIProduct getAPIProductbyUUID(String uuid, String organization) throws APIManagementException { try { Organization org = new Organization(organization); PublisherAPIProduct publisherAPIProduct = apiPersistenceInstance.getPublisherAPIProduct(org, uuid); if (publisherAPIProduct != null) { APIProduct product = APIProductMapper.INSTANCE.toApiProduct(publisherAPIProduct); product.setID(new APIProductIdentifier(publisherAPIProduct.getProviderName(), publisherAPIProduct.getApiProductName(), publisherAPIProduct.getVersion(), uuid)); checkAccessControlPermission(userNameWithoutChange, product.getAccessControl(), product.getAccessControlRoles()); populateAPIProductInformation(uuid, organization, product); populateRevisionInformation(product, uuid); populateAPIStatus(product); populateAPITier(product); return product; } else { String msg = "Failed to get API Product. 
API Product artifact corresponding to artifactId " + uuid + " does not exist"; throw new APIMgtResourceNotFoundException(msg); } } catch (APIPersistenceException | OASPersistenceException | ParseException e) { String msg = "Failed to get API Product"; throw new APIManagementException(msg, e); } } @Override public Map<String, Object> searchPaginatedAPIs(String searchQuery, String organization, int start, int end, String sortBy, String sortOrder) throws APIManagementException { Map<String, Object> result = new HashMap<String, Object>(); if (log.isDebugEnabled()) { log.debug("Original search query received : " + searchQuery); } Organization org = new Organization(organization); String[] roles = APIUtil.getFilteredUserRoles(userNameWithoutChange); Map<String, Object> properties = APIUtil.getUserProperties(userNameWithoutChange); UserContext userCtx = new UserContext(userNameWithoutChange, org, properties, roles); try { PublisherAPISearchResult searchAPIs = apiPersistenceInstance.searchAPIsForPublisher(org, searchQuery, start, end, userCtx, sortBy, sortOrder); if (log.isDebugEnabled()) { log.debug("searched APIs for query : " + searchQuery + " :-->: " + searchAPIs.toString()); } Set<Object> apiSet = new LinkedHashSet<>(); if (searchAPIs != null) { List<PublisherAPIInfo> list = searchAPIs.getPublisherAPIInfoList(); List<Object> apiList = new ArrayList<>(); for (PublisherAPIInfo publisherAPIInfo : list) { API mappedAPI = APIMapper.INSTANCE.toApi(publisherAPIInfo); populateAPIStatus(mappedAPI); populateDefaultVersion(mappedAPI); apiList.add(mappedAPI); } apiSet.addAll(apiList); result.put("apis", apiSet); result.put("length", searchAPIs.getTotalAPIsCount()); result.put("isMore", true); } else { result.put("apis", apiSet); result.put("length", 0); result.put("isMore", false); } } catch (APIPersistenceException e) { throw new APIManagementException("Error while searching the api ", e); } return result ; } @Override public String addComment(String uuid, Comment comment, 
String user) throws APIManagementException { return apiMgtDAO.addComment(uuid, comment, user); } @Override public Comment getComment(ApiTypeWrapper apiTypeWrapper, String commentId, Integer replyLimit, Integer replyOffset) throws APIManagementException { return apiMgtDAO.getComment(apiTypeWrapper, commentId, replyLimit, replyOffset); } @Override public org.wso2.carbon.apimgt.api.model.CommentList getComments(ApiTypeWrapper apiTypeWrapper, String parentCommentID, Integer replyLimit, Integer replyOffset) throws APIManagementException { return apiMgtDAO.getComments(apiTypeWrapper, parentCommentID, replyLimit, replyOffset); } @Override public boolean editComment(ApiTypeWrapper apiTypeWrapper, String commentId, Comment comment) throws APIManagementException { return apiMgtDAO.editComment(apiTypeWrapper, commentId, comment); } @Override public boolean deleteComment(ApiTypeWrapper apiTypeWrapper, String commentId) throws APIManagementException { return apiMgtDAO.deleteComment(apiTypeWrapper, commentId); } /** * Get minimal details of API by registry artifact id * * @param uuid Registry artifact id * @param organization identifier of the organization * @return API of the provided artifact id * @throws APIManagementException */ @Override public API getLightweightAPIByUUID(String uuid, String organization) throws APIManagementException { try { Organization org = new Organization(organization); PublisherAPI publisherAPI = apiPersistenceInstance.getPublisherAPI(org, uuid); if (publisherAPI != null) { API api = APIMapper.INSTANCE.toApi(publisherAPI); checkAccessControlPermission(userNameWithoutChange, api.getAccessControl(), api.getAccessControlRoles()); /// populate relavant external info // environment String environmentString = null; if (api.getEnvironments() != null) { environmentString = String.join(",", api.getEnvironments()); } api.setEnvironments(APIUtil.extractEnvironmentsForAPI(environmentString)); //CORS . 
if null is returned, set default config from the configuration if (api.getCorsConfiguration() == null) { api.setCorsConfiguration(APIUtil.getDefaultCorsConfiguration()); } api.setOrganization(organization); return api; } else { String msg = "Failed to get API. API artifact corresponding to artifactId " + uuid + " does not exist"; throw new APIMgtResourceNotFoundException(msg); } } catch (APIPersistenceException e) { String msg = "Failed to get API with uuid " + uuid; throw new APIManagementException(msg, e); } } @Override public List<APIResource> getUsedProductResources(String uuid) throws APIManagementException { List<APIResource> usedProductResources = new ArrayList<>(); Map<Integer, URITemplate> uriTemplates = ApiMgtDAO.getInstance().getURITemplatesOfAPIWithProductMapping(uuid); for (URITemplate uriTemplate : uriTemplates.values()) { // If existing URITemplate is used by any API Products if (!uriTemplate.retrieveUsedByProducts().isEmpty()) { APIResource apiResource = new APIResource(uriTemplate.getHTTPVerb(), uriTemplate.getUriTemplate()); usedProductResources.add(apiResource); } } return usedProductResources; } @Override public void addDocumentationContent(String uuid, String docId, String organization, DocumentationContent content) throws APIManagementException { DocumentContent mappedContent = null; try { mappedContent = DocumentMapper.INSTANCE.toDocumentContent(content); DocumentContent doc = apiPersistenceInstance.addDocumentationContent(new Organization(organization), uuid, docId, mappedContent); } catch (DocumentationPersistenceException e) { throw new APIManagementException("Error while adding content to doc " + docId); } } @Override public void addWSDLResource(String apiId, ResourceFile resource, String url, String organization) throws APIManagementException { if (!StringUtils.isEmpty(url)) { URL wsdlUrl; try { wsdlUrl = new URL(url); } catch (MalformedURLException e) { throw new APIManagementException("Invalid/Malformed WSDL URL : " + url, e, 
ExceptionCodes.INVALID_WSDL_URL_EXCEPTION); } // Get the WSDL 1.1 or 2.0 processor and process the content based on the version WSDLProcessor wsdlProcessor = APIMWSDLReader.getWSDLProcessorForUrl(wsdlUrl); InputStream wsdlContent = wsdlProcessor.getWSDL(); // wsdlResource.setContentStream(wsdlContent); org.wso2.carbon.apimgt.persistence.dto.ResourceFile wsdlResourceFile = new org.wso2.carbon.apimgt.persistence.dto.ResourceFile( wsdlContent, null); try { apiPersistenceInstance.saveWSDL( new Organization(organization), apiId, wsdlResourceFile); } catch (WSDLPersistenceException e) { throw new APIManagementException("Error while adding WSDL to api " + apiId, e); } } else if (resource != null) { org.wso2.carbon.apimgt.persistence.dto.ResourceFile wsdlResourceFile = new org.wso2.carbon.apimgt.persistence.dto.ResourceFile( resource.getContent(), resource.getContentType()); try { apiPersistenceInstance.saveWSDL( new Organization(organization), apiId, wsdlResourceFile); } catch (WSDLPersistenceException e) { throw new APIManagementException("Error while adding WSDL to api " + apiId, e); } } } @Override public Map<String, Object> searchPaginatedContent(String searchQuery, String organization, int start, int end) throws APIManagementException { ArrayList<Object> compoundResult = new ArrayList<Object>(); Map<Documentation, API> docMap = new HashMap<Documentation, API>(); Map<Documentation, APIProduct> productDocMap = new HashMap<Documentation, APIProduct>(); Map<String, Object> result = new HashMap<String, Object>(); SortedSet<API> apiSet = new TreeSet<API>(new APINameComparator()); SortedSet<APIProduct> apiProductSet = new TreeSet<APIProduct>(new APIProductNameComparator()); String userame = userNameWithoutChange; Organization org = new Organization(organization); Map<String, Object> properties = APIUtil.getUserProperties(userame); String[] roles = APIUtil.getFilteredUserRoles(userame); UserContext ctx = new UserContext(userame, org, properties, roles); try { 
        // Delegate the content search to the persistence layer under the caller's user context.
        PublisherContentSearchResult results = apiPersistenceInstance.searchContentForPublisher(org, searchQuery,
                start, end, ctx);
        if (results != null) {
            List<SearchContent> resultList = results.getResults();
            for (SearchContent item : resultList) {
                // Map each hit into the publisher-facing model based on its type discriminator.
                if ("API".equals(item.getType())) {
                    PublisherSearchContent publiserAPI = (PublisherSearchContent) item;
                    API api = new API(new APIIdentifier(publiserAPI.getProvider(), publiserAPI.getName(),
                            publiserAPI.getVersion()));
                    api.setUuid(publiserAPI.getId());
                    api.setContext(publiserAPI.getContext());
                    api.setContextTemplate(publiserAPI.getContext());
                    api.setStatus(publiserAPI.getStatus());
                    apiSet.add(api);
                } else if ("APIProduct".equals(item.getType())) {
                    PublisherSearchContent publiserAPI = (PublisherSearchContent) item;
                    APIProduct api = new APIProduct(new APIProductIdentifier(publiserAPI.getProvider(),
                            publiserAPI.getName(), publiserAPI.getVersion()));
                    api.setUuid(publiserAPI.getId());
                    api.setContextTemplate(publiserAPI.getContext());
                    api.setState(publiserAPI.getStatus());
                    apiProductSet.add(api);
                } else if (item instanceof DocumentSearchContent) {
                    // doc item: associate the document with its owning API or API Product.
                    DocumentSearchContent docItem = (DocumentSearchContent) item;
                    Documentation doc = new Documentation(
                            DocumentationType.valueOf(docItem.getDocType().toString()), docItem.getName());
                    doc.setSourceType(DocumentSourceType.valueOf(docItem.getSourceType().toString()));
                    doc.setVisibility(DocumentVisibility.valueOf(docItem.getVisibility().toString()));
                    doc.setId(docItem.getId());
                    if ("API".equals(docItem.getAssociatedType())) {
                        API api = new API(new APIIdentifier(docItem.getApiProvider(), docItem.getApiName(),
                                docItem.getApiVersion()));
                        api.setUuid(docItem.getApiUUID());
                        docMap.put(doc, api);
                    } else if ("APIProduct".equals(docItem.getAssociatedType())) {
                        APIProduct api = new APIProduct(new APIProductIdentifier(docItem.getApiProvider(),
                                docItem.getApiName(), docItem.getApiVersion()));
                        api.setUuid(docItem.getApiUUID());
                        productDocMap.put(doc, api);
                    }
                }
            }
            // Merge all hit categories into one name-sorted result list.
            compoundResult.addAll(apiSet);
            compoundResult.addAll(apiProductSet);
            compoundResult.addAll(docMap.entrySet());
            compoundResult.addAll(productDocMap.entrySet());
            compoundResult.sort(new ContentSearchResultNameComparator());
            result.put("length", results.getTotalCount() );
        } else {
            result.put("length", compoundResult.size() );
        }
    } catch (APIPersistenceException e) {
        throw new APIManagementException("Error while searching content ", e);
    }
    result.put("apis", compoundResult);
    return result;
}

/**
 * Persists the thumbnail image of an API.
 *
 * @param apiId        API UUID
 * @param resource     thumbnail content and content type
 * @param organization owning organization
 * @throws APIManagementException wrapping persistence failures; API_NOT_FOUND
 *                                is surfaced as APIMgtResourceNotFoundException
 */
@Override
public void setThumbnailToAPI(String apiId, ResourceFile resource, String organization)
        throws APIManagementException {

    try {
        org.wso2.carbon.apimgt.persistence.dto.ResourceFile iconResourceFile =
                new org.wso2.carbon.apimgt.persistence.dto.ResourceFile(resource.getContent(),
                        resource.getContentType());
        apiPersistenceInstance.saveThumbnail(new Organization(organization), apiId, iconResourceFile);
    } catch (ThumbnailPersistenceException e) {
        if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) {
            throw new APIMgtResourceNotFoundException(e);
        } else {
            throw new APIManagementException("Error while saving thumbnail ", e);
        }
    }
}

/**
 * Lists the mediation policies attached to the given API (name/uuid/type only;
 * policy content is not loaded here).
 *
 * @return mapped policies; empty list when the API has none
 */
@Override
public List<Mediation> getAllApiSpecificMediationPolicies(String apiId, String organization)
        throws APIManagementException {

    List<Mediation> mappedList = new ArrayList<Mediation>();
    try {
        List<MediationInfo> list = apiPersistenceInstance.getAllMediationPolicies(
                new Organization(organization), apiId);
        if (list != null) {
            for (MediationInfo mediationInfo : list) {
                Mediation mediation = new Mediation();
                mediation.setName(mediationInfo.getName());
                mediation.setUuid(mediationInfo.getId());
                mediation.setType(mediationInfo.getType());
                mappedList.add(mediation);
            }
        }
    } catch (MediationPolicyPersistenceException e) {
        if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) {
            throw new APIMgtResourceNotFoundException(e);
        } else {
            throw new APIManagementException("Error while accessing mediation policies ", e);
        }
    }
    return mappedList;
}

@Override
public Mediation
getApiSpecificMediationPolicyByPolicyId(String apiId, String policyId, String organization)
        throws APIManagementException {
    // Loads a single mediation policy (including its config) for the given API; null when absent.
    try {
        org.wso2.carbon.apimgt.persistence.dto.Mediation policy = apiPersistenceInstance.getMediationPolicy(
                new Organization(organization), apiId, policyId);
        if (policy != null) {
            Mediation mediation = new Mediation();
            mediation.setName(policy.getName());
            mediation.setUuid(policy.getId());
            mediation.setType(policy.getType());
            mediation.setConfig(policy.getConfig());
            return mediation;
        }
    } catch (MediationPolicyPersistenceException e) {
        if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) {
            throw new APIMgtResourceNotFoundException(e);
        } else {
            throw new APIManagementException("Error while accessing mediation policies ", e);
        }
    }
    // Not found (and no persistence error): caller receives null.
    return null;
}

/**
 * Attaches a new mediation policy to an API. The policy name length is
 * validated against APIConstants.MAX_LENGTH_MEDIATION_POLICY_NAME first.
 *
 * @return the input policy with its generated UUID set, or null when the
 *         persistence layer returned nothing
 */
@Override
public Mediation addApiSpecificMediationPolicy(String apiId, Mediation mediationPolicy, String organization)
        throws APIManagementException {

    if (StringUtils.isNotBlank(mediationPolicy.getName())
            && mediationPolicy.getName().length() > APIConstants.MAX_LENGTH_MEDIATION_POLICY_NAME) {
        throw new APIManagementException(ExceptionCodes.from(ExceptionCodes.MEDIATION_POLICY_NAME_TOO_LONG,
                APIConstants.MAX_LENGTH_MEDIATION_POLICY_NAME + ""));
    }
    try {
        org.wso2.carbon.apimgt.persistence.dto.Mediation mappedPolicy =
                new org.wso2.carbon.apimgt.persistence.dto.Mediation();
        mappedPolicy.setConfig(mediationPolicy.getConfig());
        mappedPolicy.setName(mediationPolicy.getName());
        mappedPolicy.setType(mediationPolicy.getType());
        org.wso2.carbon.apimgt.persistence.dto.Mediation returnedMappedPolicy = apiPersistenceInstance
                .addMediationPolicy(new Organization(organization), apiId, mappedPolicy);
        if (returnedMappedPolicy != null) {
            mediationPolicy.setUuid(returnedMappedPolicy.getId());
            return mediationPolicy;
        }
    } catch (MediationPolicyPersistenceException e) {
        if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) {
            throw new APIMgtResourceNotFoundException(e);
        } else if (e.getErrorHandler() == ExceptionCodes.MEDIATION_POLICY_API_ALREADY_EXISTS) {
            throw new APIManagementException(ExceptionCodes.MEDIATION_POLICY_API_ALREADY_EXISTS);
        } else {
            throw new APIManagementException("Error while saving mediation policy ", e);
        }
    }
    return null;
}

/**
 * Updates the content of an existing API-specific mediation policy
 * (identified by the policy's UUID).
 *
 * @return the input policy on success, or null when the persistence layer
 *         returned nothing
 */
@Override
public Mediation updateApiSpecificMediationPolicyContent(String apiId, Mediation mediationPolicy,
        String organization) throws APIManagementException {

    try {
        org.wso2.carbon.apimgt.persistence.dto.Mediation mappedPolicy =
                new org.wso2.carbon.apimgt.persistence.dto.Mediation();
        mappedPolicy.setConfig(mediationPolicy.getConfig());
        mappedPolicy.setName(mediationPolicy.getName());
        mappedPolicy.setType(mediationPolicy.getType());
        mappedPolicy.setId(mediationPolicy.getUuid());
        org.wso2.carbon.apimgt.persistence.dto.Mediation returnedMappedPolicy = apiPersistenceInstance
                .updateMediationPolicy(new Organization(organization), apiId, mappedPolicy);
        if (returnedMappedPolicy != null) {
            return mediationPolicy;
        }
    } catch (MediationPolicyPersistenceException e) {
        if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) {
            throw new APIMgtResourceNotFoundException(e);
        } else {
            throw new APIManagementException("Error while saving mediation policy ", e);
        }
    }
    return null;
}

/**
 * Deletes an API-specific mediation policy.
 */
@Override
public void deleteApiSpecificMediationPolicy(String apiId, String mediationPolicyId, String orgId)
        throws APIManagementException {

    try {
        apiPersistenceInstance.deleteMediationPolicy(new Organization(orgId), apiId, mediationPolicyId);
    } catch (MediationPolicyPersistenceException e) {
        if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) {
            throw new APIMgtResourceNotFoundException(e);
        } else {
            // NOTE(review): message says "saving" though this is a delete path — confirm before changing.
            throw new APIManagementException("Error while saving mediation policy ", e);
        }
    }
}

/**
 * Verifies that the given user may access an artifact guarded by
 * publisher access control. Returns silently when access is allowed;
 * throws when the user holds none of the required roles.
 */
protected void checkAccessControlPermission(String userNameWithTenantDomain, String accessControlProperty,
        String publisherAccessControlRoles) throws APIManagementException {

    // String userNameWithTenantDomain = (userNameWithoutChange != null) ?
    // userNameWithoutChange : username;  (continuation of the commented-out line above)

    // No access control configured: everyone may proceed.
    if (accessControlProperty == null || accessControlProperty.trim().isEmpty()
            || accessControlProperty.equalsIgnoreCase(APIConstants.NO_ACCESS_CONTROL)) {
        if (log.isDebugEnabled()) {
            log.debug("API does not have any access control restriction");
        }
        return;
    }
    // Admins bypass access control entirely.
    if (APIUtil.hasPermission(userNameWithTenantDomain, APIConstants.Permissions.APIM_ADMIN)) {
        return;
    }
    if (publisherAccessControlRoles != null && !publisherAccessControlRoles.trim().isEmpty()) {
        // Strip all whitespace, then split the configured role list on commas.
        String[] accessControlRoleList = publisherAccessControlRoles.replaceAll("\\s+", "").split(",");
        if (log.isDebugEnabled()) {
            log.debug("API has restricted access to creators and publishers with the roles : "
                    + Arrays.toString(accessControlRoleList));
        }
        String[] userRoleList = APIUtil.getListOfRoles(userNameWithTenantDomain);
        if (log.isDebugEnabled()) {
            log.debug("User " + username + " has roles " + Arrays.toString(userRoleList));
        }
        // Any single matching role grants access.
        for (String role : accessControlRoleList) {
            if (!role.equalsIgnoreCase(APIConstants.NULL_USER_ROLE_LIST)
                    && APIUtil.compareRoleList(userRoleList, role)) {
                return;
            }
        }
        throw new APIManagementException(APIConstants.UN_AUTHORIZED_ERROR_MESSAGE + " view or modify the api");
    }
}

/**
 * Persists the GraphQL schema definition of an API.
 */
@Override
public void saveGraphqlSchemaDefinition(String apiId, String definition, String organization)
        throws APIManagementException {

    try {
        apiPersistenceInstance.saveGraphQLSchemaDefinition(new Organization(organization), apiId, definition);
    } catch (GraphQLPersistenceException e) {
        if (e.getErrorHandler() == ExceptionCodes.API_NOT_FOUND) {
            throw new APIMgtResourceNotFoundException(e);
        } else {
            throw new APIManagementException("Error while saving graphql definition ", e);
        }
    }
}

/**
 * Returns APIProduct Search result based on the provided query.
 *
 * @param registry    registry handle (unused by the persistence-based implementation — kept for interface compatibility)
 * @param searchQuery Ex: provider=*admin*
 * @param start       pagination offset
 * @param end         pagination limit
 * @return APIProduct result map with keys "products", "length" and "isMore"
 * @throws APIManagementException on persistence failures
 */
public Map<String, Object> searchPaginatedAPIProducts(Registry registry, String searchQuery, int start, int end)
        throws APIManagementException {

    SortedSet<APIProduct> productSet = new TreeSet<APIProduct>(new APIProductNameComparator());
    List<APIProduct> productList = new ArrayList<APIProduct>();
    Map<String, Object> result = new HashMap<String, Object>();
    if (log.isDebugEnabled()) {
        log.debug("Original search query received : " + searchQuery);
    }
    Organization org = new Organization(tenantDomain);
    String[] roles = APIUtil.getFilteredUserRoles(userNameWithoutChange);
    Map<String, Object> properties = APIUtil.getUserProperties(userNameWithoutChange);
    UserContext userCtx = new UserContext(userNameWithoutChange, org, properties, roles);
    try {
        PublisherAPIProductSearchResult searchAPIs = apiPersistenceInstance.searchAPIProductsForPublisher(org,
                searchQuery, start, end, userCtx);
        if (log.isDebugEnabled()) {
            log.debug("searched API products for query : " + searchQuery + " :-->: " + searchAPIs.toString());
        }
        if (searchAPIs != null) {
            List<PublisherAPIProductInfo> list = searchAPIs.getPublisherAPIProductInfoList();
            List<Object> apiList = new ArrayList<>();
            // Map persistence-layer product info into the publisher model.
            for (PublisherAPIProductInfo publisherAPIInfo : list) {
                APIProduct mappedAPI = new APIProduct(new APIProductIdentifier(publisherAPIInfo.getProviderName(),
                        publisherAPIInfo.getApiProductName(), publisherAPIInfo.getVersion()));
                mappedAPI.setUuid(publisherAPIInfo.getId());
                mappedAPI.setState(publisherAPIInfo.getState());
                mappedAPI.setContext(publisherAPIInfo.getContext());
                mappedAPI.setApiSecurity(publisherAPIInfo.getApiSecurity());
                productList.add(mappedAPI);
            }
            productSet.addAll(productList);
            result.put("products", productSet);
            result.put("length", searchAPIs.getTotalAPIsCount());
            result.put("isMore", true);
        } else {
            // No results object at all: report an empty, final page.
            result.put("products", productSet);
            result.put("length", 0);
            result.put("isMore",
                    false);
        }
    } catch (APIPersistenceException e) {
        throw new APIManagementException("Error while searching the api ", e);
    }
    return result ;
}

/**
 * Adds a new APIRevision to an existing API
 *
 * @param apiRevision APIRevision
 * @throws APIManagementException if failed to add APIRevision
 */
@Override
public String addAPIRevision(APIRevision apiRevision, String organization) throws APIManagementException {

    // NOTE(review): the cap of 5 revisions is hard-coded here (> 4); consider a named constant.
    int revisionCountPerAPI = apiMgtDAO.getRevisionCountByAPI(apiRevision.getApiUUID());
    if (revisionCountPerAPI > 4) {
        String errorMessage = "Maximum number of revisions per API has reached. " +
                "Need to remove stale revision to create a new Revision for API with API UUID:"
                + apiRevision.getApiUUID();
        throw new APIManagementException(errorMessage, ExceptionCodes.from(ExceptionCodes.MAXIMUM_REVISIONS_REACHED,
                apiRevision.getApiUUID()));
    }

    // Revision ids are sequential per API.
    int revisionId = apiMgtDAO.getMostRecentRevisionId(apiRevision.getApiUUID()) + 1;
    apiRevision.setId(revisionId);
    APIIdentifier apiId = APIUtil.getAPIIdentifierFromUUID(apiRevision.getApiUUID());
    if (apiId == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
                + apiRevision.getApiUUID(), ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
                apiRevision.getApiUUID()));
    }
    apiId.setUuid(apiRevision.getApiUUID());
    String revisionUUID;
    try {
        // Create the revision artifacts in the persistence layer first.
        revisionUUID = apiPersistenceInstance.addAPIRevision(new Organization(organization), apiId.getUUID(),
                revisionId);
    } catch (APIPersistenceException e) {
        String errorMessage = "Failed to add revision registry artifacts";
        throw new APIManagementException(errorMessage, ExceptionCodes.from(ExceptionCodes.
                ERROR_CREATING_API_REVISION, apiRevision.getApiUUID()));
    }
    if (StringUtils.isEmpty(revisionUUID)) {
        String errorMessage = "Failed to retrieve revision uuid";
        throw new APIManagementException(errorMessage,
                ExceptionCodes.from(ExceptionCodes.API_REVISION_UUID_NOT_FOUND));
    }
    apiRevision.setRevisionUUID(revisionUUID);
    // Then record the revision in the DB.
    apiMgtDAO.addAPIRevision(apiRevision);
    if (importExportAPI != null) {
        try {
            File artifact = importExportAPI
                    .exportAPI(apiRevision.getApiUUID(), revisionUUID, true, ExportFormat.JSON, false, true,
                            organization);
            // Keeping the organization as tenant domain since MG does not support organization-wise deployment
            // Artifacts will be deployed in ST for all organizations
            gatewayArtifactsMgtDAO.addGatewayAPIArtifactAndMetaData(apiRevision.getApiUUID(), apiId.getApiName(),
                    apiId.getVersion(), apiRevision.getRevisionUUID(), tenantDomain, APIConstants.HTTP_PROTOCOL,
                    artifact);
            if (artifactSaver != null) {
                // Keeping the organization as tenant domain since MG does not support organization-wise deployment
                // Artifacts will be deployed in ST for all organizations
                artifactSaver.saveArtifact(apiRevision.getApiUUID(), apiId.getApiName(), apiId.getVersion(),
                        apiRevision.getRevisionUUID(), tenantDomain, artifact);
            }
        } catch (APIImportExportException | ArtifactSynchronizerException e) {
            // NOTE(review): error code API_REVISION_UUID_NOT_FOUND looks mismatched for an
            // artifact-store failure, and the cause 'e' is not chained — confirm before changing.
            throw new APIManagementException("Error while Store the Revision Artifact",
                    ExceptionCodes.from(ExceptionCodes.API_REVISION_UUID_NOT_FOUND));
        }
    }
    return revisionUUID;
}

/**
 * Get a Revision related to provided and revision UUID
 *
 * @param revisionUUID API Revision UUID
 * @return API Revision
 * @throws APIManagementException if failed to get the related API revision
 */
@Override
public APIRevision getAPIRevision(String revisionUUID) throws APIManagementException {

    return apiMgtDAO.getRevisionByRevisionUUID(revisionUUID);
}

/**
 * Get the revision UUID from the Revision no and API UUID
 *
 * @param revisionNum revision number
 * @param apiUUID UUID of the API
 * @return UUID of the revision
 *
 * @throws APIManagementException if failed to get the API revision uuid
 */
@Override
public String getAPIRevisionUUID(String revisionNum, String apiUUID) throws APIManagementException {

    return apiMgtDAO.getRevisionUUID(revisionNum, apiUUID);
}

/**
 * Get the earliest revision UUID from the revision list for a given API
 *
 * @param apiUUID API UUID
 * @return Earliest revision's UUID
 * @throws APIManagementException if failed to get the revision
 */
@Override
public String getEarliestRevisionUUID(String apiUUID) throws APIManagementException {

    return apiMgtDAO.getEarliestRevision(apiUUID);
}

/**
 * Get the latest revision UUID from the revision list for a given API
 *
 * @param apiUUID API UUID
 * @return Latest revision's UUID
 * @throws APIManagementException if failed to get the revision
 */
@Override
public String getLatestRevisionUUID(String apiUUID) throws APIManagementException {

    return apiMgtDAO.getLatestRevisionUUID(apiUUID);
}

/**
 * Get a List of API Revisions related to provided API UUID
 *
 * @param apiUUID API UUID
 * @return API Revision List
 * @throws APIManagementException if failed to get the related API revision
 */
@Override
public List<APIRevision> getAPIRevisions(String apiUUID) throws APIManagementException {

    return apiMgtDAO.getRevisionsListByAPIUUID(apiUUID);
}

/**
 * Adds a new APIRevisionDeployment to an existing API
 *
 * @param apiId API UUID
 * @param apiRevisionId API Revision UUID
 * @param apiRevisionDeployments List of APIRevisionDeployment objects
 * @param organization identifier of the organization
 * @throws APIManagementException if failed to add APIRevision
 */
@Override
public void deployAPIRevision(String apiId, String apiRevisionId,
        List<APIRevisionDeployment> apiRevisionDeployments, String organization) throws APIManagementException {

    APIIdentifier apiIdentifier = APIUtil.getAPIIdentifierFromUUID(apiId);
    if (apiIdentifier == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: " + apiId,
                ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiId));
    }
    APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
    if (apiRevision == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
    }
    List<APIRevisionDeployment> currentApiRevisionDeploymentList =
            apiMgtDAO.getAPIRevisionDeploymentsByApiUUID(apiId);
    APIGatewayManager gatewayManager = APIGatewayManager.getInstance();
    // Stamp the revision identity onto a lightweight API copy used for gateway operations.
    API api = getLightweightAPIByUUID(apiId, organization);
    api.setRevisionedApiId(apiRevision.getRevisionUUID());
    api.setRevisionId(apiRevision.getId());
    api.setUuid(apiId);
    api.getId().setUuid(apiId);
    api.setOrganization(organization);
    Set<String> environmentsToAdd = new HashSet<>();
    Map<String, String> gatewayVhosts = new HashMap<>();
    Set<APIRevisionDeployment> environmentsToRemove = new HashSet<>();
    // Any environment that already has a deployed revision gets that old deployment removed first.
    for (APIRevisionDeployment apiRevisionDeployment : apiRevisionDeployments) {
        for (APIRevisionDeployment currentapiRevisionDeployment : currentApiRevisionDeploymentList) {
            if (StringUtils.equalsIgnoreCase(currentapiRevisionDeployment.getDeployment(),
                    apiRevisionDeployment.getDeployment())) {
                environmentsToRemove.add(currentapiRevisionDeployment);
            }
        }
        environmentsToAdd.add(apiRevisionDeployment.getDeployment());
        gatewayVhosts.put(apiRevisionDeployment.getDeployment(), apiRevisionDeployment.getVhost());
    }
    if (environmentsToRemove.size() > 0) {
        apiMgtDAO.removeAPIRevisionDeployment(apiId, environmentsToRemove);
        removeFromGateway(api, environmentsToRemove, environmentsToAdd);
    }
    GatewayArtifactsMgtDAO.getInstance()
            .addAndRemovePublishedGatewayLabels(apiId, apiRevisionId, environmentsToAdd, gatewayVhosts,
                    environmentsToRemove);
    apiMgtDAO.addAPIRevisionDeployment(apiRevisionId, apiRevisionDeployments);
    if (environmentsToAdd.size() > 0) {
        // TODO remove this to organization once the microgateway can build gateway based on organization.
        gatewayManager.deployToGateway(api, tenantDomain, environmentsToAdd);
    }
    // Maintain the "published default version" bookkeeping after deployment.
    String publishedDefaultVersion = getPublishedDefaultVersion(apiIdentifier);
    String defaultVersion = getDefaultVersion(apiIdentifier);
    apiMgtDAO.updateDefaultAPIPublishedVersion(apiIdentifier);
    if (publishedDefaultVersion != null) {
        if (apiIdentifier.getVersion().equals(defaultVersion)) {
            api.setAsPublishedDefaultVersion(true);
        }
        // If this version became the published default, notify the previous default version.
        if (api.isPublishedDefaultVersion() && !apiIdentifier.getVersion().equals(publishedDefaultVersion)) {
            APIIdentifier previousDefaultVersionIdentifier = new APIIdentifier(api.getId().getProviderName(),
                    api.getId().getApiName(), publishedDefaultVersion);
            sendUpdateEventToPreviousDefaultVersion(previousDefaultVersionIdentifier, organization);
        }
    }
}

/**
 * Adds a new APIRevisionDeployment to an existing API
 *
 * @param apiId API UUID
 * @param apiRevisionUUID API Revision UUID
 * @param deployedAPIRevisionList List of APIRevisionDeployment objects
 * @throws APIManagementException if failed to add APIRevision
 */
@Override
public void addDeployedAPIRevision(String apiId, String apiRevisionUUID,
        List<DeployedAPIRevision> deployedAPIRevisionList) throws APIManagementException {

    List<DeployedAPIRevision> currentDeployedApiRevisionList =
            apiMgtDAO.getDeployedAPIRevisionByApiUUID(apiId);
    Set<DeployedAPIRevision> environmentsToRemove = new HashSet<>();
    // Deployments to add
    List<DeployedAPIRevision> environmentsToAdd = new ArrayList<>();
    List<String> envNames = new ArrayList<>();
    for (DeployedAPIRevision deployedAPIRevision : deployedAPIRevisionList) {
        // Remove duplicate entries for same revision uuid and env from incoming list
        if (!envNames.contains(deployedAPIRevision.getDeployment())) {
            envNames.add(deployedAPIRevision.getDeployment());
            environmentsToAdd.add(deployedAPIRevision);
            // Remove old deployed-revision entries of same env and apiid from existing db records
            for (DeployedAPIRevision currentapiRevisionDeployment : currentDeployedApiRevisionList) {
                if (StringUtils.equalsIgnoreCase(currentapiRevisionDeployment.getDeployment(),
                        deployedAPIRevision.getDeployment())) {
                    environmentsToRemove.add(currentapiRevisionDeployment);
                }
            }
        }
    }
    // Discard old deployment info
    if (environmentsToRemove.size() > 0) {
        apiMgtDAO.removeDeployedAPIRevision(apiId, environmentsToRemove);
    }
    // Add new deployed revision update to db
    if (deployedAPIRevisionList.size() > 0) {
        apiMgtDAO.addDeployedAPIRevision(apiRevisionUUID, environmentsToAdd);
    }
}

/**
 * Updates the dev-portal display settings of an existing revision deployment.
 * Only deployments that already exist for the API are updated.
 */
@Override
public void updateAPIDisplayOnDevportal(String apiId, String apiRevisionId,
        APIRevisionDeployment apiRevisionDeployment) throws APIManagementException {

    APIIdentifier apiIdentifier = APIUtil.getAPIIdentifierFromUUID(apiId);
    if (apiIdentifier == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: " + apiId,
                ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiId));
    }
    APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
    if (apiRevision == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
    }
    List<APIRevisionDeployment> currentApiRevisionDeploymentList =
            apiMgtDAO.getAPIRevisionDeploymentsByApiUUID(apiId);
    Set<APIRevisionDeployment> environmentsToUpdate = new HashSet<>();
    for (APIRevisionDeployment currentapiRevisionDeployment : currentApiRevisionDeploymentList) {
        if (StringUtils.equalsIgnoreCase(currentapiRevisionDeployment.getDeployment(),
                apiRevisionDeployment.getDeployment())) {
            environmentsToUpdate.add(apiRevisionDeployment);
        }
    }
    // if the provided deployment doesn't exist we are not adding to update list
    if (environmentsToUpdate.size() > 0) {
        apiMgtDAO.updateAPIRevisionDeployment(apiId, environmentsToUpdate);
    } else {
        throw new APIMgtResourceNotFoundException("deployment with " + apiRevisionDeployment.getDeployment() +
                " not found",
                ExceptionCodes.from(ExceptionCodes.EXISTING_DEPLOYMENT_NOT_FOUND,
                        apiRevisionDeployment.getDeployment()));
    }
}

// Loads the API by UUID and stamps the given revision's identity onto it.
private API getAPIbyUUID(String apiId, APIRevision apiRevision, String organization)
        throws APIManagementException {

    API api = getAPIbyUUID(apiRevision.getApiUUID(), organization);
    api.setRevisionedApiId(apiRevision.getRevisionUUID());
    api.setRevisionId(apiRevision.getId());
    api.setUuid(apiId);
    api.getId().setUuid(apiId);
    return api;
}

@Override
public APIRevisionDeployment getAPIRevisionDeployment(String name, String revisionId)
        throws APIManagementException {

    // NOTE(review): DAO method name contains the typo "Revsion" — defined elsewhere, keep as-is.
    return apiMgtDAO.getAPIRevisionDeploymentByNameAndRevsionID(name,revisionId);
}

@Override
public List<APIRevisionDeployment> getAPIRevisionDeploymentList(String revisionUUID)
        throws APIManagementException {

    return apiMgtDAO.getAPIRevisionDeploymentByRevisionUUID(revisionUUID);
}

/**
 * Remove a new APIRevisionDeployment to an existing API
 *
 * @param apiId API UUID
 * @param apiRevisionId API Revision UUID
 * @param apiRevisionDeployments List of APIRevisionDeployment objects
 * @param organization identifier of the organization
 * @throws APIManagementException if failed to add APIRevision
 */
@Override
public void undeployAPIRevisionDeployment(String apiId, String apiRevisionId,
        List<APIRevisionDeployment> apiRevisionDeployments, String organization) throws APIManagementException {

    APIIdentifier apiIdentifier = APIUtil.getAPIIdentifierFromUUID(apiId);
    if (apiIdentifier == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: " + apiId,
                ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiId));
    }
    APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
    if (apiRevision == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
    }
    API api = getAPIbyUUID(apiId, apiRevision, organization);
    // Undeploy from the gateways first, then clear DB records and gateway labels.
    removeFromGateway(api, new HashSet<>(apiRevisionDeployments), Collections.emptySet());
    apiMgtDAO.removeAPIRevisionDeployment(apiRevisionId, apiRevisionDeployments);
    GatewayArtifactsMgtDAO.getInstance().removePublishedGatewayLabels(apiId, apiRevisionId);
}

/**
 * Restore a provided API Revision as the current API of the API
 *
 * @param apiId API UUID
 * @param apiRevisionId API Revision UUID
 * @throws APIManagementException if failed to restore APIRevision
 */
@Override
public void restoreAPIRevision(String apiId, String apiRevisionId, String organization)
        throws APIManagementException {

    APIIdentifier apiIdentifier = APIUtil.getAPIIdentifierFromUUID(apiId);
    if (apiIdentifier == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: " + apiId,
                ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiId));
    }
    APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
    if (apiRevision == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
    }
    apiIdentifier.setUuid(apiId);
    try {
        // Restore registry artifacts first; DB restore follows below.
        apiPersistenceInstance.restoreAPIRevision(new Organization(organization), apiIdentifier.getUUID(),
                apiRevision.getRevisionUUID(), apiRevision.getId());
    } catch (APIPersistenceException e) {
        String errorMessage = "Failed to restore registry artifacts";
        throw new APIManagementException(errorMessage,ExceptionCodes.from(ExceptionCodes.
                ERROR_RESTORING_API_REVISION,apiRevision.getApiUUID()));
    }
    apiMgtDAO.restoreAPIRevision(apiRevision);
}

/**
 * Delete an API Revision
 *
 * @param apiId API UUID
 * @param apiRevisionId API Revision UUID
 * @param organization identifier of the organization
 * @throws APIManagementException if failed to delete APIRevision
 */
@Override
public void deleteAPIRevision(String apiId, String apiRevisionId, String organization)
        throws APIManagementException {

    APIIdentifier apiIdentifier = APIUtil.getAPIIdentifierFromUUID(apiId);
    if (apiIdentifier == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: " + apiId,
                ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiId));
    }
    APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
    if (apiRevision == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
    }
    // A revision still deployed to any gateway must be undeployed before deletion.
    List<APIRevisionDeployment> apiRevisionDeploymentsResponse = getAPIRevisionDeploymentList(apiRevisionId);
    if (apiRevisionDeploymentsResponse.size() != 0) {
        String errorMessage = "Couldn't delete API revision since API revision is currently deployed to a gateway" +
                "." +
                "You need to undeploy the API Revision from the gateway before attempting deleting API Revision: "
                + apiRevision.getRevisionUUID();
        throw new APIManagementException(errorMessage, ExceptionCodes.from(ExceptionCodes.
                EXISTING_API_REVISION_DEPLOYMENT_FOUND, apiRevisionId));
    }
    apiIdentifier.setUuid(apiId);
    try {
        apiPersistenceInstance.deleteAPIRevision(new Organization(organization), apiIdentifier.getUUID(),
                apiRevision.getRevisionUUID(), apiRevision.getId());
    } catch (APIPersistenceException e) {
        String errorMessage = "Failed to delete registry artifacts";
        throw new APIManagementException(errorMessage,ExceptionCodes.from(ExceptionCodes.
                ERROR_DELETING_API_REVISION,apiRevision.getApiUUID()));
    }
    apiMgtDAO.deleteAPIRevision(apiRevision);
    gatewayArtifactsMgtDAO.deleteGatewayArtifact(apiRevision.getApiUUID(), apiRevision.getRevisionUUID());
    if (artifactSaver != null) {
        try {
            artifactSaver.removeArtifact(apiRevision.getApiUUID(), apiIdentifier.getApiName(),
                    apiIdentifier.getVersion(), apiRevision.getRevisionUUID(), organization);
        } catch (ArtifactSynchronizerException e) {
            // Best-effort cleanup: log only, deletion has already been committed.
            log.error("Error while deleting Runtime artifacts from artifact Store", e);
        }
    }
}

/**
 * Adds a new revision to an existing API Product (same flow as addAPIRevision,
 * but against the product identifier and product DAO/export paths).
 */
@Override
public String addAPIProductRevision(APIRevision apiRevision, String organization) throws APIManagementException {

    // NOTE(review): the cap of 5 revisions is hard-coded here (> 4); consider a named constant.
    int revisionCountPerAPI = apiMgtDAO.getRevisionCountByAPI(apiRevision.getApiUUID());
    if (revisionCountPerAPI > 4) {
        String errorMessage = "Maximum number of revisions per API Product has reached. " +
                "Need to remove stale revision to create a new Revision for API Product with id:"
                + apiRevision.getApiUUID();
        throw new APIManagementException(errorMessage,
                ExceptionCodes.from(ExceptionCodes.MAXIMUM_REVISIONS_REACHED, apiRevision.getApiUUID()));
    }
    int revisionId = apiMgtDAO.getMostRecentRevisionId(apiRevision.getApiUUID()) + 1;
    apiRevision.setId(revisionId);
    APIProductIdentifier apiProductIdentifier = APIUtil.getAPIProductIdentifierFromUUID(apiRevision.getApiUUID());
    if (apiProductIdentifier == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Product with ID: "
                + apiRevision.getApiUUID(), ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
                apiRevision.getApiUUID()));
    }
    apiProductIdentifier.setUUID(apiRevision.getApiUUID());
    String revisionUUID;
    try {
        revisionUUID = apiPersistenceInstance.addAPIRevision(new Organization(tenantDomain),
                apiProductIdentifier.getUUID(), revisionId);
    } catch (APIPersistenceException e) {
        String errorMessage = "Failed to add revision registry artifacts";
        throw new APIManagementException(errorMessage, ExceptionCodes.from(ExceptionCodes.
                ERROR_CREATING_API_REVISION, apiRevision.getApiUUID()));
    }
    if (StringUtils.isEmpty(revisionUUID)) {
        String errorMessage = "Failed to retrieve revision uuid";
        throw new APIManagementException(errorMessage,
                ExceptionCodes.from(ExceptionCodes.API_REVISION_UUID_NOT_FOUND));
    }
    apiRevision.setRevisionUUID(revisionUUID);
    apiMgtDAO.addAPIProductRevision(apiRevision);
    try {
        // Export the revision and register its artifact for gateway distribution.
        File artifact = importExportAPI
                .exportAPIProduct(apiRevision.getApiUUID(), revisionUUID, true, ExportFormat.JSON, false, true,
                        organization);
        gatewayArtifactsMgtDAO
                .addGatewayAPIArtifactAndMetaData(apiRevision.getApiUUID(),apiProductIdentifier.getName(),
                        apiProductIdentifier.getVersion(), apiRevision.getRevisionUUID(), tenantDomain,
                        APIConstants.API_PRODUCT, artifact);
        if (artifactSaver != null) {
            artifactSaver.saveArtifact(apiRevision.getApiUUID(), apiProductIdentifier.getName(),
                    apiProductIdentifier.getVersion(), apiRevision.getRevisionUUID(), tenantDomain, artifact);
        }
    } catch (APIImportExportException | ArtifactSynchronizerException e) {
        // NOTE(review): error code looks mismatched for an artifact-store failure and the
        // cause 'e' is not chained — confirm before changing.
        throw new APIManagementException("Error while Store the Revision Artifact",
                ExceptionCodes.from(ExceptionCodes.API_REVISION_UUID_NOT_FOUND));
    }
    return revisionUUID;
}

/**
 * Deploys an API Product revision to the given gateway environments,
 * replacing any revision currently deployed to those environments.
 */
@Override
public void deployAPIProductRevision(String apiProductId, String apiRevisionId,
        List<APIRevisionDeployment> apiRevisionDeployments) throws APIManagementException {

    APIProductIdentifier apiProductIdentifier = APIUtil.getAPIProductIdentifierFromUUID(apiProductId);
    if (apiProductIdentifier == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Product with ID: "
                + apiProductId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiProductId));
    }
    APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
    if (apiRevision == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
    }
    // NOTE(review): the revision UUID (not the product UUID) is passed here — verify this is intentional.
    APIProduct product = getAPIProductbyUUID(apiRevisionId, tenantDomain);
    product.setUuid(apiProductId);
    List<APIRevisionDeployment> currentApiRevisionDeploymentList =
            apiMgtDAO.getAPIRevisionDeploymentsByApiUUID(apiProductId);
    APIGatewayManager gatewayManager = APIGatewayManager.getInstance();
    Set<String> environmentsToAdd = new HashSet<>();
    Map<String, String> gatewayVhosts = new HashMap<>();
    Set<APIRevisionDeployment> environmentsToRemove = new HashSet<>();
    // Environments that already carry a deployment get their old deployment removed first.
    for (APIRevisionDeployment apiRevisionDeployment : apiRevisionDeployments) {
        for (APIRevisionDeployment currentapiRevisionDeployment : currentApiRevisionDeploymentList) {
            if (StringUtils.equalsIgnoreCase(currentapiRevisionDeployment.getDeployment(),
                    apiRevisionDeployment.getDeployment())) {
                environmentsToRemove.add(currentapiRevisionDeployment);
            }
        }
        environmentsToAdd.add(apiRevisionDeployment.getDeployment());
        gatewayVhosts.put(apiRevisionDeployment.getDeployment(), apiRevisionDeployment.getVhost());
    }
    if (environmentsToRemove.size() > 0) {
        apiMgtDAO.removeAPIRevisionDeployment(apiProductId,environmentsToRemove);
        removeFromGateway(product, tenantDomain, environmentsToRemove, environmentsToAdd);
    }
    GatewayArtifactsMgtDAO.getInstance()
            .addAndRemovePublishedGatewayLabels(apiProductId, apiRevisionId, environmentsToAdd, gatewayVhosts,
                    environmentsToRemove);
    apiMgtDAO.addAPIRevisionDeployment(apiRevisionId, apiRevisionDeployments);
    if (environmentsToAdd.size() > 0) {
        gatewayManager.deployToGateway(product, tenantDomain, environmentsToAdd);
    }
}

/**
 * Updates the dev-portal display settings of an existing API Product
 * revision deployment. Only deployments that already exist are updated.
 */
@Override
public void updateAPIProductDisplayOnDevportal(String apiProductId, String apiRevisionId,
        APIRevisionDeployment apiRevisionDeployment) throws APIManagementException {

    APIProductIdentifier apiProductIdentifier = APIUtil.getAPIProductIdentifierFromUUID(apiProductId);
    if (apiProductIdentifier == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Product with ID: "
                + apiProductId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiProductId));
    }
    APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
    if (apiRevision == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
    }
    List<APIRevisionDeployment> currentApiRevisionDeploymentList =
            apiMgtDAO.getAPIRevisionDeploymentsByApiUUID(apiProductId);
    Set<APIRevisionDeployment> environmentsToUpdate = new HashSet<>();
    for (APIRevisionDeployment currentapiRevisionDeployment : currentApiRevisionDeploymentList) {
        if (StringUtils.equalsIgnoreCase(currentapiRevisionDeployment.getDeployment(),
                apiRevisionDeployment.getDeployment())) {
            environmentsToUpdate.add(apiRevisionDeployment);
        }
    }
    // if the provided deployment doesn't exist we are not adding to update list
    if (environmentsToUpdate.size() > 0) {
        apiMgtDAO.updateAPIRevisionDeployment(apiProductId, environmentsToUpdate);
    } else {
        throw new APIMgtResourceNotFoundException("deployment with " + apiRevisionDeployment.getDeployment() +
                " not found",
                ExceptionCodes.from(ExceptionCodes.EXISTING_DEPLOYMENT_NOT_FOUND,
                        apiRevisionDeployment.getDeployment()));
    }
}

@Override
public void undeployAPIProductRevisionDeployment(String apiProductId, String apiRevisionId,
        List<APIRevisionDeployment> apiRevisionDeployments) throws APIManagementException {

    APIProductIdentifier apiProductIdentifier = APIUtil.getAPIProductIdentifierFromUUID(apiProductId);
    if (apiProductIdentifier == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Product with ID: "
                + apiProductId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiProductId));
    }
    APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId);
    if (apiRevision == null) {
        throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: "
                + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId));
    }
    APIProduct
product = getAPIProductbyUUID(apiRevisionId, tenantDomain); product.setUuid(apiProductId); Set<String> environmentsToRemove = new HashSet<>(); for (APIRevisionDeployment apiRevisionDeployment : apiRevisionDeployments) { environmentsToRemove.add(apiRevisionDeployment.getDeployment()); } product.setEnvironments(environmentsToRemove); removeFromGateway(product, tenantDomain, new HashSet<>(apiRevisionDeployments),Collections.emptySet()); apiMgtDAO.removeAPIRevisionDeployment(apiRevisionId, apiRevisionDeployments); GatewayArtifactsMgtDAO.getInstance().removePublishedGatewayLabels(apiProductId, apiRevisionId); } @Override public void restoreAPIProductRevision(String apiProductId, String apiRevisionId, String organization) throws APIManagementException { APIProductIdentifier apiProductIdentifier = APIUtil.getAPIProductIdentifierFromUUID(apiProductId); if (apiProductIdentifier == null) { throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Product with ID: " + apiProductId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiProductId)); } APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId); if (apiRevision == null) { throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: " + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId)); } apiProductIdentifier.setUUID(apiProductId); try { apiPersistenceInstance.restoreAPIRevision(new Organization(organization), apiProductIdentifier.getUUID(), apiRevision.getRevisionUUID(), apiRevision.getId()); } catch (APIPersistenceException e) { String errorMessage = "Failed to restore registry artifacts"; throw new APIManagementException(errorMessage,ExceptionCodes.from(ExceptionCodes. 
ERROR_RESTORING_API_REVISION,apiRevision.getApiUUID())); } apiMgtDAO.restoreAPIProductRevision(apiRevision); } @Override public void deleteAPIProductRevision(String apiProductId, String apiRevisionId, String organization) throws APIManagementException { APIProductIdentifier apiProductIdentifier = APIUtil.getAPIProductIdentifierFromUUID(apiProductId); if (apiProductIdentifier == null) { throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Product with ID: " + apiProductId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiProductId)); } APIRevision apiRevision = apiMgtDAO.getRevisionByRevisionUUID(apiRevisionId); if (apiRevision == null) { throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API Revision with Revision UUID: " + apiRevisionId, ExceptionCodes.from(ExceptionCodes.API_REVISION_NOT_FOUND, apiRevisionId)); } List<APIRevisionDeployment> apiRevisionDeploymentsResponse = getAPIRevisionDeploymentList(apiRevisionId); if (apiRevisionDeploymentsResponse.size() != 0) { String errorMessage = "Couldn't delete API revision since API revision is currently deployed to a gateway." + "You need to undeploy the API Revision from the gateway before attempting deleting API Revision: " + apiRevision.getRevisionUUID(); throw new APIManagementException(errorMessage,ExceptionCodes.from(ExceptionCodes. EXISTING_API_REVISION_DEPLOYMENT_FOUND, apiRevisionId)); } apiProductIdentifier.setUUID(apiProductId); try { apiPersistenceInstance.deleteAPIRevision(new Organization(organization), apiProductIdentifier.getUUID(), apiRevision.getRevisionUUID(), apiRevision.getId()); } catch (APIPersistenceException e) { String errorMessage = "Failed to delete registry artifacts"; throw new APIManagementException(errorMessage,ExceptionCodes.from(ExceptionCodes. 
ERROR_DELETING_API_REVISION,apiRevision.getApiUUID())); } apiMgtDAO.deleteAPIProductRevision(apiRevision); gatewayArtifactsMgtDAO.deleteGatewayArtifact(apiRevision.getApiUUID(), apiRevision.getRevisionUUID()); if (artifactSaver != null) { try { artifactSaver.removeArtifact(apiRevision.getApiUUID(), apiProductIdentifier.getName(), apiProductIdentifier.getVersion(), apiRevision.getRevisionUUID(), tenantDomain); } catch (ArtifactSynchronizerException e) { log.error("Error while deleting Runtime artifacts from artifact Store", e); } } } @Override public String generateApiKey(String apiId) throws APIManagementException { APIInfo apiInfo = apiMgtDAO.getAPIInfoByUUID(apiId); if (apiInfo == null) { throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with ID: " + apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiId)); } SubscribedApiDTO subscribedApiInfo = new SubscribedApiDTO(); subscribedApiInfo.setName(apiInfo.getName()); subscribedApiInfo.setContext(apiInfo.getContext()); subscribedApiInfo.setPublisher(apiInfo.getProvider()); subscribedApiInfo.setVersion(apiInfo.getVersion()); JwtTokenInfoDTO jwtTokenInfoDTO = new JwtTokenInfoDTO(); jwtTokenInfoDTO.setEndUserName(username); jwtTokenInfoDTO.setKeyType(APIConstants.API_KEY_TYPE_PRODUCTION); jwtTokenInfoDTO.setSubscribedApiDTOList(Arrays.asList(subscribedApiInfo)); jwtTokenInfoDTO.setExpirationTime(60 * 1000); ApiKeyGenerator apiKeyGenerator = new InternalAPIKeyGenerator(); return apiKeyGenerator.generateToken(jwtTokenInfoDTO); } @Override public List<APIRevisionDeployment> getAPIRevisionsDeploymentList(String apiId) throws APIManagementException { return apiMgtDAO.getAPIRevisionDeploymentByApiUUID(apiId); } }
Fix the api update issue when the API has local scopes assigned
components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/APIProviderImpl.java
Fix the api update issue when the API has local scopes assigned
Java
apache-2.0
fa0675009adc36d5b96af8cb19baafe80bc969c7
0
firebase/firebase-android-sdk,firebase/firebase-android-sdk,firebase/firebase-android-sdk,firebase/firebase-android-sdk,firebase/firebase-android-sdk,firebase/firebase-android-sdk,firebase/firebase-android-sdk,firebase/firebase-android-sdk
// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.firebase.database; import static com.google.firebase.database.core.utilities.Utilities.hardAssert; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.RestrictTo; import com.google.android.gms.common.internal.Objects; import com.google.android.gms.tasks.Task; import com.google.firebase.database.core.ChildEventRegistration; import com.google.firebase.database.core.EventRegistration; import com.google.firebase.database.core.Path; import com.google.firebase.database.core.Repo; import com.google.firebase.database.core.ValueEventRegistration; import com.google.firebase.database.core.ZombieEventManager; import com.google.firebase.database.core.utilities.PushIdGenerator; import com.google.firebase.database.core.utilities.Validation; import com.google.firebase.database.core.view.QueryParams; import com.google.firebase.database.core.view.QuerySpec; import com.google.firebase.database.snapshot.BooleanNode; import com.google.firebase.database.snapshot.ChildKey; import com.google.firebase.database.snapshot.DoubleNode; import com.google.firebase.database.snapshot.EmptyNode; import com.google.firebase.database.snapshot.Index; import com.google.firebase.database.snapshot.KeyIndex; import com.google.firebase.database.snapshot.Node; import com.google.firebase.database.snapshot.PathIndex; import 
com.google.firebase.database.snapshot.PriorityIndex; import com.google.firebase.database.snapshot.PriorityUtilities; import com.google.firebase.database.snapshot.StringNode; import com.google.firebase.database.snapshot.ValueIndex; /** * The Query class (and its subclass, {@link DatabaseReference}) are used for reading data. * Listeners are attached, and they will be triggered when the corresponding data changes. <br> * <br> * Instances of Query are obtained by calling startAt(), endAt(), or limit() on a DatabaseReference. */ public class Query { /** @hide */ protected final Repo repo; /** @hide */ protected final Path path; /** @hide */ protected final QueryParams params; // we can't use params index, because the default query params have priority index set as default, // but we don't want to allow multiple orderByPriority calls, so track them here private final boolean orderByCalled; Query(Repo repo, Path path, QueryParams params, boolean orderByCalled) throws DatabaseException { this.repo = repo; this.path = path; this.params = params; this.orderByCalled = orderByCalled; hardAssert(params.isValid(), "Validation of queries failed."); } Query(Repo repo, Path path) { this.repo = repo; this.path = path; this.params = QueryParams.DEFAULT_PARAMS; this.orderByCalled = false; } /** * This method validates that key index has been called with the correct combination of parameters */ private void validateQueryEndpoints(QueryParams params) { if (params.getIndex().equals(KeyIndex.getInstance())) { String message = "You must use startAt(String value), startAfter(String value), endAt(String value), " + "endBefore(String value) or equalTo(String value) in combination with " + "orderByKey(). 
Other type of values or using the version with 2 parameters is " + "not supported"; if (params.hasStart()) { Node startNode = params.getIndexStartValue(); ChildKey startName = params.getIndexStartName(); if (!Objects.equal(startName, ChildKey.getMinName()) || !(startNode instanceof StringNode)) { throw new IllegalArgumentException(message); } } if (params.hasEnd()) { Node endNode = params.getIndexEndValue(); ChildKey endName = params.getIndexEndName(); if (!endName.equals(ChildKey.getMaxName()) || !(endNode instanceof StringNode)) { throw new IllegalArgumentException(message); } } } else if (params.getIndex().equals(PriorityIndex.getInstance())) { if ((params.hasStart() && !PriorityUtilities.isValidPriority(params.getIndexStartValue())) || (params.hasEnd() && !PriorityUtilities.isValidPriority(params.getIndexEndValue()))) { throw new IllegalArgumentException( "When using orderByPriority(), values provided to startAt(), startAfter(), " + "endAt(), endBefore(), or equalTo() must be valid priorities."); } } } /** This method validates that limit has been called with the correct combination or parameters */ private void validateLimit(QueryParams params) { if (params.hasStart() && params.hasEnd() && params.hasLimit() && !params.hasAnchoredLimit()) { throw new IllegalArgumentException( "Can't combine startAt(), startAfter(), endAt(), endBefore(), and limit(). 
" + "Use limitToFirst() or limitToLast() instead"); } } /** This method validates that the equalTo call can be made */ private void validateEqualToCall() { if (params.hasStart()) { throw new IllegalArgumentException("Cannot combine equalTo() with startAt() or startAfter()"); } if (params.hasEnd()) { throw new IllegalArgumentException("Cannot combine equalTo() with endAt() or endBefore()"); } } /** This method validates that only one order by call has been made */ private void validateNoOrderByCall() { if (this.orderByCalled) { throw new IllegalArgumentException("You can't combine multiple orderBy calls!"); } } /** * Add a listener for changes in the data at this location. Each time time the data changes, your * listener will be called with an immutable snapshot of the data. * * @param listener The listener to be called with changes * @return A reference to the listener provided. Save this to remove the listener later. */ @NonNull public ValueEventListener addValueEventListener(@NonNull ValueEventListener listener) { addEventRegistration(new ValueEventRegistration(repo, listener, getSpec())); return listener; } /** * Add a listener for child events occurring at this location. When child locations are added, * removed, changed, or moved, the listener will be triggered for the appropriate event * * @param listener The listener to be called with changes * @return A reference to the listener provided. Save this to remove the listener later. */ @NonNull public ChildEventListener addChildEventListener(@NonNull ChildEventListener listener) { addEventRegistration(new ChildEventRegistration(repo, listener, getSpec())); return listener; } /** * Gets the server values for this query. Updates the cache and raises events if successful. If * not connected, falls back to a locally-cached value. */ @NonNull public Task<DataSnapshot> get() { return repo.getValue(this); } /** * Add a listener for a single change in the data at this location. 
This listener will be * triggered once with the value of the data at the location. * * @param listener The listener to be called with the data */ public void addListenerForSingleValueEvent(@NonNull final ValueEventListener listener) { addEventRegistration( new ValueEventRegistration( repo, new ValueEventListener() { @Override public void onDataChange(DataSnapshot snapshot) { // Removing the event listener will also prevent any further calls into onDataChange removeEventListener(this); listener.onDataChange(snapshot); } @Override public void onCancelled(DatabaseError error) { listener.onCancelled(error); } }, getSpec())); } /** * Remove the specified listener from this location. * * @param listener The listener to remove */ public void removeEventListener(@NonNull final ValueEventListener listener) { if (listener == null) { throw new NullPointerException("listener must not be null"); } removeEventRegistration(new ValueEventRegistration(repo, listener, getSpec())); } /** * Remove the specified listener from this location. 
* * @param listener The listener to remove */ public void removeEventListener(@NonNull final ChildEventListener listener) { if (listener == null) { throw new NullPointerException("listener must not be null"); } removeEventRegistration(new ChildEventRegistration(repo, listener, getSpec())); } private void removeEventRegistration(final EventRegistration registration) { ZombieEventManager.getInstance().zombifyForRemove(registration); repo.scheduleNow( new Runnable() { @Override public void run() { repo.removeEventCallback(registration); } }); } private void addEventRegistration(final EventRegistration listener) { ZombieEventManager.getInstance().recordEventRegistration(listener); repo.scheduleNow( new Runnable() { @Override public void run() { repo.addEventCallback(listener); } }); } /** * By calling `keepSynced(true)` on a location, the data for that location will automatically be * downloaded and kept in sync, even when no listeners are attached for that location. * Additionally, while a location is kept synced, it will not be evicted from the persistent disk * cache. * * @since 2.3 * @param keepSynced Pass `true` to keep this location synchronized, pass `false` to stop * synchronization. */ public void keepSynced(final boolean keepSynced) { if (!this.path.isEmpty() && this.path.getFront().equals(ChildKey.getInfoKey())) { throw new DatabaseException("Can't call keepSynced() on .info paths."); } repo.scheduleNow( new Runnable() { @Override public void run() { repo.keepSynced(getSpec(), keepSynced); } }); } /** Removes all of the event listeners at this location */ /*public void removeAllEventListeners() { Query.scheduleNow(new Runnable() { @Override public void run() { repo.removeEventCallback(Query.this, null); } }); }*/ /** * Creates a query constrained to only return child nodes with a value greater than the given * value, using the given {@code orderBy} directive or priority as default. 
* * @param value The value to start at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query startAfter(@Nullable String value) { if (value != null && params.getIndex().equals(KeyIndex.getInstance())) { return startAt(PushIdGenerator.successor(value)); } return startAt(value, ChildKey.getMaxName().asString()); } /** * Creates a query constrained to only return child nodes with a value greater than the given * value, using the given {@code orderBy} directive or priority as default. * * @param value The value to start at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query startAfter(double value) { return startAt(value, ChildKey.getMaxName().asString()); } /** * Creates a query constrained to only return child nodes with a value greater than the given * value, using the given {@code orderBy} directive or priority as default. * * @param value The value to start at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query startAfter(boolean value) { return startAt(value, ChildKey.getMaxName().asString()); } /** * Creates a query constrained to only return child nodes with a value greater or equal to the * given value, using the given {@code orderBy} directive or priority as default, and additionally * only child nodes with a key greater than the given key. * * @param value The value to start at * @param key The key to start at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query startAfter(@Nullable String value, @Nullable String key) { if (value != null && params.getIndex().equals(KeyIndex.getInstance())) { value = PushIdGenerator.successor(value); } Node node = value != null ? 
new StringNode(value, PriorityUtilities.NullPriority()) : EmptyNode.Empty(); return startAfter(node, key); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default, and * additionally only child nodes with a key greater than the given key. * * @param value The value to start at * @param key The key name to start at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query startAfter(double value, @Nullable String key) { return startAfter(new DoubleNode(value, PriorityUtilities.NullPriority()), key); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default, and * additionally only child nodes with a key greater than the given key. * * @param value The value to start at * @param key The key to start at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query startAfter(boolean value, @Nullable String key) { return startAfter(new BooleanNode(value, PriorityUtilities.NullPriority()), key); } private Query startAfter(Node node, String key) { return startAt(node, PushIdGenerator.successor(key)); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default. * * @param value The value to start at, inclusive * @return A query with the new constraint */ @NonNull public Query startAt(@Nullable String value) { return startAt(value, null); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default. 
* * @param value The value to start at, inclusive * @return A query with the new constraint */ @NonNull public Query startAt(double value) { return startAt(value, null); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default. * * @param value The value to start at, inclusive * @return A query with the new constraint * @since 2.0 */ @NonNull public Query startAt(boolean value) { return startAt(value, null); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default, and * additionally only child nodes with a key greater than or equal to the given key. * * @param value The priority to start at, inclusive * @param key The key to start at, inclusive * @return A query with the new constraint */ @NonNull public Query startAt(@Nullable String value, @Nullable String key) { Node node = value != null ? new StringNode(value, PriorityUtilities.NullPriority()) : EmptyNode.Empty(); return startAt(node, key); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default, and * additionally only child nodes with a key greater than or equal to the given key. 
* * @param value The priority to start at, inclusive * @param key The key name to start at, inclusive * @return A query with the new constraint */ @NonNull public Query startAt(double value, @Nullable String key) { return startAt(new DoubleNode(value, PriorityUtilities.NullPriority()), key); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default, and * additionally only child nodes with a key greater than or equal to the given key. * * @param value The priority to start at, inclusive * @param key The key to start at, inclusive * @return A query with the new constraint * @since 2.0 */ @NonNull public Query startAt(boolean value, @Nullable String key) { return startAt(new BooleanNode(value, PriorityUtilities.NullPriority()), key); } private Query startAt(Node node, String key) { Validation.validateNullableKey(key); if (!(node.isLeafNode() || node.isEmpty())) { throw new IllegalArgumentException( "Can only use simple values for startAt() and startAfter()"); } if (params.hasStart()) { throw new IllegalArgumentException( "Can't call startAt(), startAfte(), or equalTo() multiple times"); } ChildKey childKey = null; if (key != null) { if (key.equals(ChildKey.MIN_KEY_NAME)) { childKey = ChildKey.getMinName(); } else if (key.equals(ChildKey.MAX_KEY_NAME)) { childKey = ChildKey.getMaxName(); } else { childKey = ChildKey.fromString(key); } } QueryParams newParams = params.startAt(node, childKey); validateLimit(newParams); validateQueryEndpoints(newParams); hardAssert(newParams.isValid()); return new Query(repo, path, newParams, orderByCalled); } /** * Creates a query constrained to only return child nodes with a value less than the given value, * using the given {@code orderBy} directive or priority as default. 
* * @param value The value to end at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query endBefore(@Nullable String value) { if (value != null && params.getIndex().equals(KeyIndex.getInstance())) { return endAt(PushIdGenerator.predecessor(value)); } return endAt(value, ChildKey.getMinName().asString()); } /** * Creates a query constrained to only return child nodes with a value less than the given value, * using the given {@code orderBy} directive or priority as default. * * @param value The value to end at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query endBefore(double value) { return endAt(value, ChildKey.getMinName().asString()); } /** * Creates a query constrained to only return child nodes with a value less than the given value, * using the given {@code orderBy} directive or priority as default. * * @param value The value to end at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query endBefore(boolean value) { return endAt(value, ChildKey.getMinName().asString()); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default, and additionally * only child nodes with a key less than the given key. * * @param value The value to end at * @param key The key to end at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query endBefore(@Nullable String value, @Nullable String key) { if (value != null && params.getIndex().equals(KeyIndex.getInstance())) { value = PushIdGenerator.predecessor(value); } Node node = value != null ? 
new StringNode(value, PriorityUtilities.NullPriority()) : EmptyNode.Empty(); return endBefore(node, key); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default, and additionally * only child nodes with a key less than the given key. * * @param value The value to end at * @param key The key to end at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query endBefore(double value, @Nullable String key) { return endBefore(new DoubleNode(value, PriorityUtilities.NullPriority()), key); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default, and additionally * only child nodes with a key less than the given key. * * @param value The value to end at * @param key The key to end at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query endBefore(boolean value, @Nullable String key) { return endBefore(new BooleanNode(value, PriorityUtilities.NullPriority()), key); } private Query endBefore(Node node, String key) { return endAt(node, PushIdGenerator.predecessor(key)); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default. * * @param value The value to end at, inclusive * @return A query with the new constraint */ @NonNull public Query endAt(@Nullable String value) { return endAt(value, null); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default. 
* * @param value The value to end at, inclusive * @return A query with the new constraint */ @NonNull public Query endAt(double value) { return endAt(value, null); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default. * * @param value The value to end at, inclusive * @return A query with the new constraint * @since 2.0 */ @NonNull public Query endAt(boolean value) { return endAt(value, null); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default, and additionally * only child nodes with a key less than or equal to the given key. * * @param value The value to end at, inclusive * @param key The key to end at, inclusive * @return A query with the new constraint */ @NonNull public Query endAt(@Nullable String value, @Nullable String key) { Node node = value != null ? new StringNode(value, PriorityUtilities.NullPriority()) : EmptyNode.Empty(); return endAt(node, key); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default, and additionally * only child nodes with a key less than or equal to the given key. * * @param value The value to end at, inclusive * @param key The key to end at, inclusive * @return A query with the new constraint */ @NonNull public Query endAt(double value, @Nullable String key) { return endAt(new DoubleNode(value, PriorityUtilities.NullPriority()), key); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default, and additionally * only child nodes with a key less than or equal to the given key. 
* * @param value The value to end at, inclusive * @param key The key to end at, inclusive * @return A query with the new constraint * @since 2.0 */ @NonNull public Query endAt(boolean value, @Nullable String key) { return endAt(new BooleanNode(value, PriorityUtilities.NullPriority()), key); } private Query endAt(Node node, String key) { Validation.validateNullableKey(key); if (!(node.isLeafNode() || node.isEmpty())) { throw new IllegalArgumentException("Can only use simple values for endAt()"); } ChildKey childKey = key != null ? ChildKey.fromString(key) : null; if (params.hasEnd()) { throw new IllegalArgumentException("Can't call endAt() or equalTo() multiple times"); } QueryParams newParams = params.endAt(node, childKey); validateLimit(newParams); validateQueryEndpoints(newParams); hardAssert(newParams.isValid()); return new Query(repo, path, newParams, orderByCalled); } /** * Creates a query constrained to only return child nodes with the given value. * * @param value The value to query for * @return A query with the new constraint */ @NonNull public Query equalTo(@Nullable String value) { validateEqualToCall(); return this.startAt(value).endAt(value); } /** * Creates a query constrained to only return child nodes with the given value. * * @param value The value to query for * @return A query with the new constraint */ @NonNull public Query equalTo(double value) { validateEqualToCall(); return this.startAt(value).endAt(value); } /** * Creates a query constrained to only return child nodes with the given value. * * @param value The value to query for * @return A query with the new constraint * @since 2.0 */ @NonNull public Query equalTo(boolean value) { validateEqualToCall(); return this.startAt(value).endAt(value); } /** * Creates a query constrained to only return the child node with the given key and value. Note * that there is at most one such child as names are unique. 
* * @param value The value to query for * @param key The key of the child * @return A query with the new constraint */ @NonNull public Query equalTo(@Nullable String value, @Nullable String key) { validateEqualToCall(); return this.startAt(value, key).endAt(value, key); } /** * Creates a query constrained to only return the child node with the given key and value. Note * that there is at most one such child as keys are unique. * * @param value The value to query for * @param key The key of the child * @return A query with the new constraint */ @NonNull public Query equalTo(double value, @Nullable String key) { validateEqualToCall(); return this.startAt(value, key).endAt(value, key); } /** * Creates a query constrained to only return the child node with the given key and value. Note * that there is at most one such child as keys are unique. * * @param value The value to query for * @param key The name of the child * @return A query with the new constraint */ @NonNull public Query equalTo(boolean value, @Nullable String key) { validateEqualToCall(); return this.startAt(value, key).endAt(value, key); } /** * Creates a query with limit and anchor it to the start of the window. * * @param limit The maximum number of child nodes to return * @return A query with the new constraint * @since 2.0 */ @NonNull public Query limitToFirst(int limit) { if (limit <= 0) { throw new IllegalArgumentException("Limit must be a positive integer!"); } if (params.hasLimit()) { throw new IllegalArgumentException( "Can't call limitToLast on query with previously set limit!"); } return new Query(repo, path, params.limitToFirst(limit), orderByCalled); } /** * Creates a query with limit and anchor it to the end of the window. 
* * @param limit The maximum number of child nodes to return * @return A query with the new constraint * @since 2.0 */ @NonNull public Query limitToLast(int limit) { if (limit <= 0) { throw new IllegalArgumentException("Limit must be a positive integer!"); } if (params.hasLimit()) { throw new IllegalArgumentException( "Can't call limitToLast on query with previously set limit!"); } return new Query(repo, path, params.limitToLast(limit), orderByCalled); } /** * Creates a query in which child nodes are ordered by the values of the specified path. * * @param path The path to the child node to use for sorting * @return A query with the new constraint * @since 2.0 */ @NonNull public Query orderByChild(@NonNull String path) { if (path == null) { throw new NullPointerException("Key can't be null"); } if (path.equals("$key") || path.equals(".key")) { throw new IllegalArgumentException( "Can't use '" + path + "' as path, please use orderByKey() instead!"); } if (path.equals("$priority") || path.equals(".priority")) { throw new IllegalArgumentException( "Can't use '" + path + "' as path, please use orderByPriority() instead!"); } if (path.equals("$value") || path.equals(".value")) { throw new IllegalArgumentException( "Can't use '" + path + "' as path, please use orderByValue() instead!"); } Validation.validatePathString(path); validateNoOrderByCall(); Path indexPath = new Path(path); if (indexPath.size() == 0) { throw new IllegalArgumentException("Can't use empty path, use orderByValue() instead!"); } Index index = new PathIndex(indexPath); return new Query(repo, this.path, params.orderBy(index), true); } /** * Creates a query in which child nodes are ordered by their priorities. 
* * @return A query with the new constraint * @since 2.0 */ @NonNull public Query orderByPriority() { validateNoOrderByCall(); QueryParams newParams = params.orderBy(PriorityIndex.getInstance()); validateQueryEndpoints(newParams); return new Query(repo, path, newParams, true); } /** * Creates a query in which child nodes are ordered by their keys. * * @return A query with the new constraint * @since 2.0 */ @NonNull public Query orderByKey() { validateNoOrderByCall(); QueryParams newParams = this.params.orderBy(KeyIndex.getInstance()); validateQueryEndpoints(newParams); return new Query(repo, path, newParams, true); } /** * Creates a query in which nodes are ordered by their value * * @return A query with the new constraint * @since 2.2 */ @NonNull public Query orderByValue() { validateNoOrderByCall(); return new Query(repo, path, params.orderBy(ValueIndex.getInstance()), true); } /** @return A DatabaseReference to this location */ @NonNull public DatabaseReference getRef() { return new DatabaseReference(repo, getPath()); } // Need to hide these... /** * <strong>For internal use</strong> * * @hide * @return The path to this location */ @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP) public Path getPath() { return path; } /** * <strong>For internal use</strong> * * @hide * @return The repo */ @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP) public Repo getRepo() { return repo; } /** * <strong>For internal use</strong> * * @hide * @return The constraints */ @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP) public QuerySpec getSpec() { return new QuerySpec(path, params); } }
firebase-database/src/main/java/com/google/firebase/database/Query.java
// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.firebase.database; import static com.google.firebase.database.core.utilities.Utilities.hardAssert; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.RestrictTo; import com.google.android.gms.common.internal.Objects; import com.google.android.gms.tasks.Task; import com.google.firebase.database.core.ChildEventRegistration; import com.google.firebase.database.core.EventRegistration; import com.google.firebase.database.core.Path; import com.google.firebase.database.core.Repo; import com.google.firebase.database.core.ValueEventRegistration; import com.google.firebase.database.core.ZombieEventManager; import com.google.firebase.database.core.utilities.PushIdGenerator; import com.google.firebase.database.core.utilities.Validation; import com.google.firebase.database.core.view.QueryParams; import com.google.firebase.database.core.view.QuerySpec; import com.google.firebase.database.snapshot.BooleanNode; import com.google.firebase.database.snapshot.ChildKey; import com.google.firebase.database.snapshot.DoubleNode; import com.google.firebase.database.snapshot.EmptyNode; import com.google.firebase.database.snapshot.Index; import com.google.firebase.database.snapshot.KeyIndex; import com.google.firebase.database.snapshot.Node; import com.google.firebase.database.snapshot.PathIndex; import 
com.google.firebase.database.snapshot.PriorityIndex; import com.google.firebase.database.snapshot.PriorityUtilities; import com.google.firebase.database.snapshot.StringNode; import com.google.firebase.database.snapshot.ValueIndex; /** * The Query class (and its subclass, {@link DatabaseReference}) are used for reading data. * Listeners are attached, and they will be triggered when the corresponding data changes. <br> * <br> * Instances of Query are obtained by calling startAt(), endAt(), or limit() on a DatabaseReference. */ public class Query { /** @hide */ protected final Repo repo; /** @hide */ protected final Path path; /** @hide */ protected final QueryParams params; // we can't use params index, because the default query params have priority index set as default, // but we don't want to allow multiple orderByPriority calls, so track them here private final boolean orderByCalled; Query(Repo repo, Path path, QueryParams params, boolean orderByCalled) throws DatabaseException { this.repo = repo; this.path = path; this.params = params; this.orderByCalled = orderByCalled; hardAssert(params.isValid(), "Validation of queries failed."); } Query(Repo repo, Path path) { this.repo = repo; this.path = path; this.params = QueryParams.DEFAULT_PARAMS; this.orderByCalled = false; } /** * This method validates that key index has been called with the correct combination of parameters */ private void validateQueryEndpoints(QueryParams params) { if (params.getIndex().equals(KeyIndex.getInstance())) { String message = "You must use startAt(String value), startAfter(String value), endAt(String value), " + "endBefore(String value) or equalTo(String value) in combination with " + "orderByKey(). 
Other type of values or using the version with 2 parameters is " + "not supported"; if (params.hasStart()) { Node startNode = params.getIndexStartValue(); ChildKey startName = params.getIndexStartName(); if (!Objects.equal(startName, ChildKey.getMinName()) || !(startNode instanceof StringNode)) { throw new IllegalArgumentException(message); } } if (params.hasEnd()) { Node endNode = params.getIndexEndValue(); ChildKey endName = params.getIndexEndName(); if (!endName.equals(ChildKey.getMaxName()) || !(endNode instanceof StringNode)) { throw new IllegalArgumentException(message); } } } else if (params.getIndex().equals(PriorityIndex.getInstance())) { if ((params.hasStart() && !PriorityUtilities.isValidPriority(params.getIndexStartValue())) || (params.hasEnd() && !PriorityUtilities.isValidPriority(params.getIndexEndValue()))) { throw new IllegalArgumentException( "When using orderByPriority(), values provided to startAt(), startAfter(), " + "endAt(), endBefore(), or equalTo() must be valid priorities."); } } } /** This method validates that limit has been called with the correct combination or parameters */ private void validateLimit(QueryParams params) { if (params.hasStart() && params.hasEnd() && params.hasLimit() && !params.hasAnchoredLimit()) { throw new IllegalArgumentException( "Can't combine startAt(), startAfter(), endAt(), endBefore(), and limit(). 
" + "Use limitToFirst() or limitToLast() instead"); } } /** This method validates that the equalTo call can be made */ private void validateEqualToCall() { if (params.hasStart()) { throw new IllegalArgumentException("Cannot combine equalTo() with startAt() or startAfter()"); } if (params.hasEnd()) { throw new IllegalArgumentException("Cannot combine equalTo() with endAt() or endBefore()"); } } /** This method validates that only one order by call has been made */ private void validateNoOrderByCall() { if (this.orderByCalled) { throw new IllegalArgumentException("You can't combine multiple orderBy calls!"); } } /** * Add a listener for changes in the data at this location. Each time time the data changes, your * listener will be called with an immutable snapshot of the data. * * @param listener The listener to be called with changes * @return A reference to the listener provided. Save this to remove the listener later. */ @NonNull public ValueEventListener addValueEventListener(@NonNull ValueEventListener listener) { addEventRegistration(new ValueEventRegistration(repo, listener, getSpec())); return listener; } /** * Add a listener for child events occurring at this location. When child locations are added, * removed, changed, or moved, the listener will be triggered for the appropriate event * * @param listener The listener to be called with changes * @return A reference to the listener provided. Save this to remove the listener later. */ @NonNull public ChildEventListener addChildEventListener(@NonNull ChildEventListener listener) { addEventRegistration(new ChildEventRegistration(repo, listener, getSpec())); return listener; } /** * Gets the server values for this query. Updates the cache and raises events if successful. If * not connected, falls back to a locally-cached value. */ @NonNull public Task<DataSnapshot> get() { return repo.getValue(this); } /** * Add a listener for a single change in the data at this location. 
This listener will be * triggered once with the value of the data at the location. * * @param listener The listener to be called with the data */ public void addListenerForSingleValueEvent(@NonNull final ValueEventListener listener) { addEventRegistration( new ValueEventRegistration( repo, new ValueEventListener() { @Override public void onDataChange(DataSnapshot snapshot) { // Removing the event listener will also prevent any further calls into onDataChange removeEventListener(this); listener.onDataChange(snapshot); } @Override public void onCancelled(DatabaseError error) { listener.onCancelled(error); } }, getSpec())); } /** * Remove the specified listener from this location. * * @param listener The listener to remove */ public void removeEventListener(@NonNull final ValueEventListener listener) { if (listener == null) { throw new NullPointerException("listener must not be null"); } removeEventRegistration(new ValueEventRegistration(repo, listener, getSpec())); } /** * Remove the specified listener from this location. 
* * @param listener The listener to remove */ public void removeEventListener(@NonNull final ChildEventListener listener) { if (listener == null) { throw new NullPointerException("listener must not be null"); } removeEventRegistration(new ChildEventRegistration(repo, listener, getSpec())); } private void removeEventRegistration(final EventRegistration registration) { ZombieEventManager.getInstance().zombifyForRemove(registration); repo.scheduleNow( new Runnable() { @Override public void run() { repo.removeEventCallback(registration); } }); } private void addEventRegistration(final EventRegistration listener) { ZombieEventManager.getInstance().recordEventRegistration(listener); repo.scheduleNow( new Runnable() { @Override public void run() { repo.addEventCallback(listener); } }); } /** * By calling `keepSynced(true)` on a location, the data for that location will automatically be * downloaded and kept in sync, even when no listeners are attached for that location. * Additionally, while a location is kept synced, it will not be evicted from the persistent disk * cache. * * @since 2.3 * @param keepSynced Pass `true` to keep this location synchronized, pass `false` to stop * synchronization. */ public void keepSynced(final boolean keepSynced) { if (!this.path.isEmpty() && this.path.getFront().equals(ChildKey.getInfoKey())) { throw new DatabaseException("Can't call keepSynced() on .info paths."); } repo.scheduleNow( new Runnable() { @Override public void run() { repo.keepSynced(getSpec(), keepSynced); } }); } /** Removes all of the event listeners at this location */ /*public void removeAllEventListeners() { Query.scheduleNow(new Runnable() { @Override public void run() { repo.removeEventCallback(Query.this, null); } }); }*/ /** * Creates a query constrained to only return child nodes with a value greater than the given * value, using the given {@code orderBy} directive or priority as default. 
* * @param value The value to start at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query startAfter(@Nullable String value) { if (value != null && params.getIndex().equals(KeyIndex.getInstance())) { return startAt(PushIdGenerator.successor(value)); } return startAt(value, ChildKey.getMaxName().asString()); } /** * Creates a query constrained to only return child nodes with a value greater than the given * value, using the given {@code orderBy} directive or priority as default. * * @param value The value to start at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query startAfter(double value) { return startAt(value, ChildKey.getMaxName().asString()); } /** * Creates a query constrained to only return child nodes with a value greater than the given * value, using the given {@code orderBy} directive or priority as default. * * @param value The value to start at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query startAfter(boolean value) { return startAt(value, ChildKey.getMaxName().asString()); } /** * Creates a query constrained to only return child nodes with a value greater or equal to the * given value, using the given {@code orderBy} directive or priority as default, and additionally * only child nodes with a key greater than the given key. * * @param value The value to start at * @param key The key to start at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query startAfter(@Nullable String value, @Nullable String key) { if (value != null && params.getIndex().equals(KeyIndex.getInstance())) { value = PushIdGenerator.successor(value); } Node node = value != null ? 
new StringNode(value, PriorityUtilities.NullPriority()) : EmptyNode.Empty(); return startAfter(node, key); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default, and * additionally only child nodes with a key greater than the given key. * * @param value The value to start at * @param key The key name to start at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query startAfter(double value, @Nullable String key) { return startAfter(new DoubleNode(value, PriorityUtilities.NullPriority()), key); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default, and * additionally only child nodes with a key greater than the given key. * * @param value The value to start at * @param key The key to start at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query startAfter(boolean value, @Nullable String key) { return startAfter(new BooleanNode(value, PriorityUtilities.NullPriority()), key); } private Query startAfter(Node node, String key) { return startAt(node, PushIdGenerator.successor(key)); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default. * * @param value The value to start at, inclusive * @return A query with the new constraint */ @NonNull public Query startAt(@Nullable String value) { return startAt(value, null); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default. 
* * @param value The value to start at, inclusive * @return A query with the new constraint */ @NonNull public Query startAt(double value) { return startAt(value, null); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default. * * @param value The value to start at, inclusive * @return A query with the new constraint * @since 2.0 */ @NonNull public Query startAt(boolean value) { return startAt(value, null); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default, and * additionally only child nodes with a key greater than or equal to the given key. * * @param value The priority to start at, inclusive * @param key The key to start at, inclusive * @return A query with the new constraint */ @NonNull public Query startAt(@Nullable String value, @Nullable String key) { Node node = value != null ? new StringNode(value, PriorityUtilities.NullPriority()) : EmptyNode.Empty(); return startAt(node, key); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default, and * additionally only child nodes with a key greater than or equal to the given key. 
* * @param value The priority to start at, inclusive * @param key The key name to start at, inclusive * @return A query with the new constraint */ @NonNull public Query startAt(double value, @Nullable String key) { return startAt(new DoubleNode(value, PriorityUtilities.NullPriority()), key); } /** * Creates a query constrained to only return child nodes with a value greater than or equal to * the given value, using the given {@code orderBy} directive or priority as default, and * additionally only child nodes with a key greater than or equal to the given key. * * @param value The priority to start at, inclusive * @param key The key to start at, inclusive * @return A query with the new constraint * @since 2.0 */ @NonNull public Query startAt(boolean value, @Nullable String key) { return startAt(new BooleanNode(value, PriorityUtilities.NullPriority()), key); } private Query startAt(Node node, String key) { Validation.validateNullableKey(key); if (!(node.isLeafNode() || node.isEmpty())) { throw new IllegalArgumentException( "Can only use simple values for startAt() and startAfter()"); } if (params.hasStart()) { throw new IllegalArgumentException( "Can't call startAt(), startAfte(), or equalTo() multiple times"); } ChildKey childKey = null; if (key != null) { if (key.equals(ChildKey.MIN_KEY_NAME)) { childKey = ChildKey.getMinName(); } else if (key.equals(ChildKey.MAX_KEY_NAME)) { childKey = ChildKey.getMaxName(); } else { childKey = ChildKey.fromString(key); } } QueryParams newParams = params.startAt(node, childKey); validateLimit(newParams); validateQueryEndpoints(newParams); hardAssert(newParams.isValid()); return new Query(repo, path, newParams, orderByCalled); } /** * Creates a query constrained to only return child nodes with a value less than the given value, * using the given {@code orderBy} directive or priority as default. 
* * @param value The value to end at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query endBefore(@Nullable String value) { if (value != null && params.getIndex().equals(KeyIndex.getInstance())) { return endAt(PushIdGenerator.predecessor(value)); } return endAt(value, ChildKey.getMinName().asString()); } /** * Creates a query constrained to only return child nodes with a value less than the given value, * using the given {@code orderBy} directive or priority as default. * * @param value The value to end at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query endBefore(double value) { return endAt(value, ChildKey.getMinName().asString()); } /** * Creates a query constrained to only return child nodes with a value less than the given value, * using the given {@code orderBy} directive or priority as default. * * @param value The value to end at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query endBefore(boolean value) { return endAt(value, ChildKey.getMinName().asString()); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default, and additionally * only child nodes with a key less than the given key. * * @param value The value to end at * @param key The key to end at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query endBefore(@Nullable String value, @Nullable String key) { if (value != null && params.getIndex().equals(KeyIndex.getInstance())) { value = PushIdGenerator.predecessor(value); } Node node = value != null ? 
new StringNode(value, PriorityUtilities.NullPriority()) : EmptyNode.Empty(); return endBefore(node, key); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default, and additionally * only child nodes with a key less than the given key. * * @param value The value to end at * @param key The key to end at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query endBefore(double value, @Nullable String key) { return endBefore(new DoubleNode(value, PriorityUtilities.NullPriority()), key); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default, and additionally * only child nodes with a key less than the given key. * * @param value The value to end at * @param key The key to end at, exclusive * @return A query with the new constraint * @since 19.6 */ @NonNull public Query endBefore(boolean value, @Nullable String key) { return endBefore(new BooleanNode(value, PriorityUtilities.NullPriority()), key); } private Query endBefore(Node node, String key) { return endAt(node, PushIdGenerator.predecessor(key)); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default. * * @param value The value to end at, inclusive * @return A query with the new constraint */ @NonNull public Query endAt(@Nullable String value) { return endAt(value, null); } /** * Creates a query constrained to only return child nodes with a value less than or equal to the * given value, using the given {@code orderBy} directive or priority as default. 
   * @param value The value to end at, inclusive
   * @return A query with the new constraint
   */
  @NonNull
  public Query endAt(double value) {
    return endAt(value, null);
  }

  /**
   * Creates a query constrained to only return child nodes with a value less than or equal to the
   * given value, using the given {@code orderBy} directive or priority as default.
   *
   * @param value The value to end at, inclusive
   * @return A query with the new constraint
   * @since 2.0
   */
  @NonNull
  public Query endAt(boolean value) {
    return endAt(value, null);
  }

  /**
   * Creates a query constrained to only return child nodes with a value less than or equal to the
   * given value, using the given {@code orderBy} directive or priority as default, and additionally
   * only child nodes with a key less than or equal to the given key.
   *
   * @param value The value to end at, inclusive
   * @param key The key to end at, inclusive
   * @return A query with the new constraint
   */
  @NonNull
  public Query endAt(@Nullable String value, @Nullable String key) {
    Node node =
        value != null ? new StringNode(value, PriorityUtilities.NullPriority()) : EmptyNode.Empty();
    return endAt(node, key);
  }

  /**
   * Creates a query constrained to only return child nodes with a value less than or equal to the
   * given value, using the given {@code orderBy} directive or priority as default, and additionally
   * only child nodes with a key less than or equal to the given key.
   *
   * @param value The value to end at, inclusive
   * @param key The key to end at, inclusive
   * @return A query with the new constraint
   */
  @NonNull
  public Query endAt(double value, @Nullable String key) {
    return endAt(new DoubleNode(value, PriorityUtilities.NullPriority()), key);
  }

  /**
   * Creates a query constrained to only return child nodes with a value less than or equal to the
   * given value, using the given {@code orderBy} directive or priority as default, and additionally
   * only child nodes with a key less than or equal to the given key.
* * @param value The value to end at, inclusive * @param key The key to end at, inclusive * @return A query with the new constraint * @since 2.0 */ @NonNull public Query endAt(boolean value, @Nullable String key) { return endAt(new BooleanNode(value, PriorityUtilities.NullPriority()), key); } private Query endAt(Node node, String key) { Validation.validateNullableKey(key); if (!(node.isLeafNode() || node.isEmpty())) { throw new IllegalArgumentException("Can only use simple values for endAt()"); } ChildKey childKey = key != null ? ChildKey.fromString(key) : null; if (params.hasEnd()) { throw new IllegalArgumentException("Can't call endAt() or equalTo() multiple times"); } QueryParams newParams = params.endAt(node, childKey); validateLimit(newParams); validateQueryEndpoints(newParams); hardAssert(newParams.isValid()); return new Query(repo, path, newParams, orderByCalled); } /** * Creates a query constrained to only return child nodes with the given value. * * @param value The value to query for * @return A query with the new constraint */ @NonNull public Query equalTo(@Nullable String value) { validateEqualToCall(); return this.startAt(value).endAt(value); } /** * Creates a query constrained to only return child nodes with the given value. * * @param value The value to query for * @return A query with the new constraint */ @NonNull public Query equalTo(double value) { validateEqualToCall(); return this.startAt(value).endAt(value); } /** * Creates a query constrained to only return child nodes with the given value. * * @param value The value to query for * @return A query with the new constraint * @since 2.0 */ @NonNull public Query equalTo(boolean value) { validateEqualToCall(); return this.startAt(value).endAt(value); } /** * Creates a query constrained to only return the child node with the given key and value. Note * that there is at most one such child as names are unique. 
* * @param value The value to query for * @param key The key of the child * @return A query with the new constraint */ @NonNull public Query equalTo(@Nullable String value, @Nullable String key) { validateEqualToCall(); return this.startAt(value, key).endAt(value, key); } /** * Creates a query constrained to only return the child node with the given key and value. Note * that there is at most one such child as keys are unique. * * @param value The value to query for * @param key The key of the child * @return A query with the new constraint */ @NonNull public Query equalTo(double value, @Nullable String key) { validateEqualToCall(); return this.startAt(value, key).endAt(value, key); } /** * Creates a query constrained to only return the child node with the given key and value. Note * that there is at most one such child as keys are unique. * * @param value The value to query for * @param key The name of the child * @return A query with the new constraint */ @NonNull public Query equalTo(boolean value, @Nullable String key) { validateEqualToCall(); return this.startAt(value, key).endAt(value, key); } /** * Creates a query with limit and anchor it to the start of the window. * * @param limit The maximum number of child nodes to return * @return A query with the new constraint * @since 2.0 */ @NonNull public Query limitToFirst(int limit) { if (limit <= 0) { throw new IllegalArgumentException("Limit must be a positive integer!"); } if (params.hasLimit()) { throw new IllegalArgumentException( "Can't call limitToLast on query with previously set limit!"); } return new Query(repo, path, params.limitToFirst(limit), orderByCalled); } /** * Creates a query with limit and anchor it to the end of the window. 
* * @param limit The maximum number of child nodes to return * @return A query with the new constraint * @since 2.0 */ @NonNull public Query limitToLast(int limit) { if (limit <= 0) { throw new IllegalArgumentException("Limit must be a positive integer!"); } if (params.hasLimit()) { throw new IllegalArgumentException( "Can't call limitToLast on query with previously set limit!"); } return new Query(repo, path, params.limitToLast(limit), orderByCalled); } /** * Creates a query in which child nodes are ordered by the values of the specified path. * * @param path The path to the child node to use for sorting * @return A query with the new constraint * @since 2.0 */ @NonNull public Query orderByChild(@NonNull String path) { if (path == null) { throw new NullPointerException("Key can't be null"); } if (path.equals("$key") || path.equals(".key")) { throw new IllegalArgumentException( "Can't use '" + path + "' as path, please use orderByKey() instead!"); } if (path.equals("$priority") || path.equals(".priority")) { throw new IllegalArgumentException( "Can't use '" + path + "' as path, please use orderByPriority() instead!"); } if (path.equals("$value") || path.equals(".value")) { throw new IllegalArgumentException( "Can't use '" + path + "' as path, please use orderByValue() instead!"); } Validation.validatePathString(path); validateNoOrderByCall(); Path indexPath = new Path(path); if (indexPath.size() == 0) { throw new IllegalArgumentException("Can't use empty path, use orderByValue() instead!"); } Index index = new PathIndex(indexPath); return new Query(repo, this.path, params.orderBy(index), true); } /** * Creates a query in which child nodes are ordered by their priorities. 
* * @return A query with the new constraint * @since 2.0 */ @NonNull public Query orderByPriority() { validateNoOrderByCall(); QueryParams newParams = params.orderBy(PriorityIndex.getInstance()); validateQueryEndpoints(newParams); return new Query(repo, path, newParams, true); } /** * Creates a query in which child nodes are ordered by their keys. * * @return A query with the new constraint * @since 2.0 */ @NonNull public Query orderByKey() { validateNoOrderByCall(); QueryParams newParams = this.params.orderBy(KeyIndex.getInstance()); validateQueryEndpoints(newParams); return new Query(repo, path, newParams, true); } /** * Creates a query in which nodes are ordered by their value * * @return A query with the new constraint * @since 2.2 */ @NonNull public Query orderByValue() { validateNoOrderByCall(); return new Query(repo, path, params.orderBy(ValueIndex.getInstance()), true); } /** @return A DatabaseReference to this location */ @NonNull public DatabaseReference getRef() { return new DatabaseReference(repo, getPath()); } // Need to hide these... /** * <strong>For internal use</strong> * * @hide * @return The path to this location */ @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP) public Path getPath() { return path; } /** * <strong>For internal use</strong> * * @hide * @return The repo */ @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP) public Repo getRepo() { return repo; } /** * <strong>For internal use</strong> * * @hide * @return The constraints */ @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP) public QuerySpec getSpec() { return new QuerySpec(path, params); } }
Key key -> key (#2493)
firebase-database/src/main/java/com/google/firebase/database/Query.java
Key key -> key (#2493)
Java
apache-2.0
2308386319a119785f6df0fa770c997ffd72d1a2
0
Panda-Programming-Language/Panda
/* * Copyright (c) 2015-2018 Dzikoysk * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.panda_lang.panda.framework.language.parser.implementation.statement; import org.panda_lang.panda.framework.design.interpreter.parser.PandaPipelines; import org.panda_lang.panda.framework.design.interpreter.parser.PandaPriorities; import org.panda_lang.panda.framework.design.interpreter.parser.ParserData; import org.panda_lang.panda.framework.design.interpreter.parser.UnifiedParser; import org.panda_lang.panda.framework.design.interpreter.parser.component.UniversalComponents; import org.panda_lang.panda.framework.design.interpreter.parser.pipeline.ParserPipeline; import org.panda_lang.panda.framework.design.interpreter.parser.pipeline.ParserRegistration; import org.panda_lang.panda.framework.design.interpreter.parser.pipeline.ParserRepresentation; import org.panda_lang.panda.framework.design.interpreter.parser.pipeline.registry.PipelineRegistry; import org.panda_lang.panda.framework.design.interpreter.token.TokenizedSource; import org.panda_lang.panda.framework.design.interpreter.token.distributor.SourceStream; import org.panda_lang.panda.framework.language.interpreter.parser.PandaParserFailure; import org.panda_lang.panda.framework.language.interpreter.pattern.abyss.AbyssPattern; import org.panda_lang.panda.framework.language.interpreter.pattern.abyss.utils.AbyssPatternBuilder; import org.panda_lang.panda.framework.language.interpreter.token.PandaSyntax; import 
org.panda_lang.panda.framework.language.interpreter.token.distributor.PandaSourceStream; import org.panda_lang.panda.framework.language.parser.bootstrap.PandaParserBootstrap; import org.panda_lang.panda.framework.language.parser.bootstrap.annotations.Autowired; import org.panda_lang.panda.framework.language.parser.bootstrap.annotations.Component; import org.panda_lang.panda.framework.language.parser.bootstrap.annotations.Redactor; import org.panda_lang.panda.framework.language.parser.bootstrap.layer.Delegation; @ParserRegistration(target = PandaPipelines.SCOPE, parserClass = StatementParser.class, handlerClass = StatementParserHandler.class, priority = PandaPriorities.STATEMENT_VARIABLE_PARSER) public class StatementParser implements UnifiedParser { protected static final AbyssPattern PATTERN = new AbyssPatternBuilder() .compile(PandaSyntax.getInstance(), "+* ;") .build(); private ParserRepresentation bootstrapParser = PandaParserBootstrap.builder() .pattern("+* ;", "statement") .instance(this) .build(); @Override public boolean parse(ParserData data) { return bootstrapParser.getParser().parse(data); } @Autowired(value = Delegation.IMMEDIATELY, order = 1) private void parseStatement(ParserData data, @Component PipelineRegistry registry, @Redactor("statement") TokenizedSource statement) { SourceStream declarationStream = new PandaSourceStream(statement); ParserPipeline pipeline = registry.getPipeline(PandaPipelines.STATEMENT); UnifiedParser statementParser = pipeline.handle(declarationStream); if (statementParser == null) { throw new PandaParserFailure("Cannot recognize statement", data); } ParserData statementParserData = data.fork(); statementParserData.setComponent(UniversalComponents.SOURCE_STREAM, declarationStream); statementParser.parse(statementParserData); } }
panda/src/main/java/org/panda_lang/panda/framework/language/parser/implementation/statement/StatementParser.java
/* * Copyright (c) 2015-2018 Dzikoysk * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.panda_lang.panda.framework.language.parser.implementation.statement; import org.panda_lang.panda.framework.language.interpreter.token.PandaSyntax; import org.panda_lang.panda.framework.design.interpreter.parser.pipeline.registry.*; import org.panda_lang.panda.framework.design.interpreter.token.*; import org.panda_lang.panda.framework.design.interpreter.parser.*; import org.panda_lang.panda.framework.design.interpreter.parser.component.*; import org.panda_lang.panda.framework.design.interpreter.parser.generation.casual.*; import org.panda_lang.panda.framework.design.interpreter.parser.generation.util.*; import org.panda_lang.panda.framework.design.interpreter.parser.pipeline.*; import org.panda_lang.panda.framework.design.interpreter.token.distributor.*; import org.panda_lang.panda.framework.language.interpreter.parser.*; import org.panda_lang.panda.framework.language.interpreter.pattern.abyss.utils.AbyssPatternAssistant; import org.panda_lang.panda.framework.language.interpreter.pattern.abyss.utils.AbyssPatternBuilder; import org.panda_lang.panda.framework.language.interpreter.token.distributor.*; import org.panda_lang.panda.framework.language.interpreter.pattern.abyss.*; import org.panda_lang.panda.framework.language.interpreter.pattern.abyss.redactor.*; import org.panda_lang.panda.framework.language.interpreter.token.utils.*; @ParserRegistration(target = PandaPipelines.SCOPE, 
parserClass = StatementParser.class, handlerClass = StatementParserHandler.class, priority = PandaPriorities.STATEMENT_VARIABLE_PARSER) public class StatementParser implements UnifiedParser { protected static final AbyssPattern PATTERN = new AbyssPatternBuilder() .compile(PandaSyntax.getInstance(), "+* ;") .build(); @Override public boolean parse(ParserData data) { PipelineRegistry pipelineRegistry = data.getComponent(UniversalComponents.PIPELINE); ParserPipeline pipeline = pipelineRegistry.getPipeline(PandaPipelines.STATEMENT); AbyssRedactorHollows hollows = AbyssPatternAssistant.extract(PATTERN, data); TokenizedSource source = hollows.getGap(0); SourceStream declarationStream = new PandaSourceStream(source); UnifiedParser statementParser = pipeline.handle(declarationStream); if (statementParser == null) { throw new PandaParserException("Cannot recognize block at line " + TokenUtils.getLine(source)); } ParserData statementParserData = data.fork(); statementParserData.setComponent(UniversalComponents.SOURCE_STREAM, declarationStream); statementParser.parse(statementParserData); return true; } @LocalCallback private static class DeclarationParserCallback implements CasualParserGenerationCallback { @Override public void call(ParserData delegatedData, CasualParserGenerationLayer nextLayer) { PipelineRegistry pipelineRegistry = delegatedData.getComponent(UniversalComponents.PIPELINE); ParserPipeline pipeline = pipelineRegistry.getPipeline(PandaPipelines.STATEMENT); AbyssRedactorHollows hollows = AbyssPatternAssistant.extract(PATTERN, delegatedData); TokenizedSource source = hollows.getGap(0); SourceStream declarationStream = new PandaSourceStream(source); UnifiedParser statementParser = pipeline.handle(declarationStream); if (statementParser == null) { throw new PandaParserException("Cannot recognize statement at line " + TokenUtils.getLine(source)); } ParserData statementParserData = delegatedData.fork(); 
statementParserData.setComponent(UniversalComponents.SOURCE_STREAM, declarationStream); statementParser.parse(statementParserData); } } }
Implement Parser Bootstrap in the statement parser
panda/src/main/java/org/panda_lang/panda/framework/language/parser/implementation/statement/StatementParser.java
Implement Parser Bootstrap in the statement parser
Java
apache-2.0
4aafb5e5304e23849b7960d5dfe8c793c458d8fe
0
osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi
/* * ============================================================================ * (c) Copyright 2005 Nokia * This material, including documentation and any related computer programs, * is protected by copyright controlled by Nokia and its licensors. * All rights are reserved. * * These materials have been contributed to the Open Services Gateway * Initiative (OSGi)as "MEMBER LICENSED MATERIALS" as defined in, and subject * to the terms of, the OSGi Member Agreement specifically including, but not * limited to, the license rights and warranty disclaimers as set forth in * Sections 3.2 and 12.1 thereof, and the applicable Statement of Work. * All company, brand and product names contained within this document may be * trademarks that are the sole property of the respective owners. * The above notice must be included on all copies of this document. * ============================================================================ */ package org.osgi.impl.service.policy.integrationtests; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.net.URL; import java.net.URLConnection; import java.security.AllPermission; import java.security.CodeSource; import java.security.PermissionCollection; import java.security.Permissions; import java.security.Policy; import java.security.PrivilegedExceptionAction; import java.security.Security; import junit.framework.TestCase; import org.eclipse.osgi.framework.internal.core.FrameworkSecurityManager; import org.eclipse.osgi.framework.internal.core.OSGi; import org.eclipse.osgi.framework.internal.defaultadaptor.DefaultAdaptor; import org.osgi.framework.Bundle; import org.osgi.framework.BundleContext; import org.osgi.framework.PackagePermission; import org.osgi.framework.ServiceReference; import org.osgi.service.condpermadmin.ConditionalPermissionAdmin; import org.osgi.service.permissionadmin.PermissionAdmin; import org.osgi.service.permissionadmin.PermissionInfo; /** * * TODO Add Javadoc comment for this type. 
* * @version $Revision$ */ public abstract class IntegratedTest extends TestCase { public static final String ORG_OSGI_IMPL_SERVICE_POLICY_JAR = "file:../../org.osgi.impl.service.policy/org.osgi.impl.service.policy.jar"; public static final String ORG_OSGI_IMPL_SERVICE_DMT_JAR = "file:../../org.osgi.impl.service.dmt/org.osgi.impl.service.dmt.jar"; public static final String ORG_OSGI_IMPL_SERVICE_LOG_JAR = "file:../../org.osgi.impl.service.log/org.osgi.impl.service.log.jar"; public static final String ORG_OSGI_IMPL_SERVICE_CM_JAR = "file:../../org.osgi.impl.service.cm/org.osgi.impl.service.cm.jar"; public static final String ORG_OSGI_IMPL_SERVICE_EVENT_MAPPER_JAR = "file:../../org.osgi.impl.service.event/org.osgi.impl.service.event.mapper.jar"; public static final String ORG_OSGI_IMPL_SERVICE_EVENT_JAR = "file:../../org.osgi.impl.service.event/org.osgi.impl.service.event.jar"; public static final String ORG_OSGI_IMPL_SERVICE_POLICY_USERPROMPT_JAR = "file:../../org.osgi.impl.service.policy/org.osgi.impl.service.policy.userprompt.jar"; public static final String INTEGRATIONTESTS_BUNDLE1_JAR = "file:../integrationtests.bundle1.jar"; public static final String INTEGRATIONTESTS_BUNDLE2_JAR = "file:../integrationtests.bundle2.jar"; public static final String INTEGRATIONTESTS_MESSAGES_JAR = "file:../integrationtests.messages.jar"; public FrameworkSecurityManager secMan; public DefaultAdaptor adaptor; public BundleContext systemBundleContext; public Bundle osgiAPIsBundle; public Bundle eventBundle; public Bundle eventMapperBundle; public Bundle configManagerBundle; public Bundle logBundle; public Bundle dmtBundle; public Bundle policyBundle; public Bundle userPromptBundle; public Bundle integrationTestBundle1; public Bundle integrationTestBundle2; public Bundle integrationTestMessagesBundle; public OSGi framework; public PermissionAdmin permissionAdmin; public ConditionalPermissionAdmin conditionalPermissionAdmin; public Method bundle1DoAction; public Method 
bundle2DoAction; /** * This policy implementation gives AllPermission to all code sources. * Hopefully the framework will overide this for the bundle code sources. */ public static class VeryGenerousPolicy extends Policy { public void refresh() {} public PermissionCollection getPermissions(CodeSource codesource) { //System.out.println(codesource.getLocation()); Permissions pc = new Permissions(); pc.add(new AllPermission()); return pc; } } public void tearDown() throws Exception { stopFramework(); } public void setBundleAsAdministrator(String location) throws Exception { permissionAdmin.setPermissions(location,new PermissionInfo[] { new PermissionInfo(AllPermission.class.getName(),"*","*")}); } public void startFramework(boolean fresh) throws Exception { cleanAllFactories(); Policy.setPolicy(new VeryGenerousPolicy()); // replace policy file ${user.home}/.java.policy with our own Security.setProperty("policy.url.2","file:policy"); secMan = new FrameworkSecurityManager(); System.setSecurityManager(secMan); adaptor = new DefaultAdaptor(fresh?new String[] { "reset" }:null); framework = new OSGi(adaptor); framework.launch(); systemBundleContext = framework.getBundleContext(); ServiceReference sr = systemBundleContext.getServiceReference(PermissionAdmin.class.getName()); permissionAdmin = (PermissionAdmin) systemBundleContext.getService(sr); sr = systemBundleContext.getServiceReference(ConditionalPermissionAdmin.class.getName()); conditionalPermissionAdmin = (ConditionalPermissionAdmin) systemBundleContext.getService(sr); if (fresh) { // Warning! Don't do this on a real system! 
permissionAdmin.setDefaultPermissions(new PermissionInfo[] { new PermissionInfo(PackagePermission.class.getName(),"*","IMPORT") }); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_EVENT_JAR); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_EVENT_MAPPER_JAR); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_CM_JAR); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_LOG_JAR); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_DMT_JAR); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_POLICY_JAR); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_POLICY_USERPROMPT_JAR); permissionAdmin.setPermissions(INTEGRATIONTESTS_MESSAGES_JAR, new PermissionInfo[]{ new PermissionInfo(PackagePermission.class.getName(),"*","EXPORT") } ); } eventBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_EVENT_JAR); eventMapperBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_EVENT_MAPPER_JAR); configManagerBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_CM_JAR); logBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_LOG_JAR); dmtBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_DMT_JAR); policyBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_POLICY_JAR); userPromptBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_POLICY_USERPROMPT_JAR); integrationTestBundle1 = systemBundleContext.installBundle(INTEGRATIONTESTS_BUNDLE1_JAR); integrationTestBundle2 = systemBundleContext.installBundle(INTEGRATIONTESTS_BUNDLE2_JAR); integrationTestMessagesBundle = systemBundleContext.installBundle(INTEGRATIONTESTS_MESSAGES_JAR); eventBundle.start(); eventMapperBundle.start(); configManagerBundle.start(); logBundle.start(); dmtBundle.start(); policyBundle.start(); userPromptBundle.start(); integrationTestMessagesBundle.start(); Class cl = integrationTestBundle1.loadClass("org.osgi.impl.service.policy.integrationtests.bundle1.Test"); bundle1DoAction = cl.getDeclaredMethod("doAction",new 
Class[]{PrivilegedExceptionAction.class}); cl = integrationTestBundle2.loadClass("org.osgi.impl.service.policy.integrationtests.bundle2.Test"); bundle2DoAction = cl.getDeclaredMethod("doAction",new Class[]{PrivilegedExceptionAction.class}); } public void stopFramework() throws Exception { if (framework!=null && framework.isActive()) framework.shutdown(); framework = null; System.setSecurityManager(null); Policy.setPolicy(null); secMan = null; adaptor = null; systemBundleContext = null; osgiAPIsBundle = null; eventBundle = null; eventMapperBundle = null; configManagerBundle = null; logBundle = null; dmtBundle = null; policyBundle = null; integrationTestBundle1 = null; permissionAdmin = null; conditionalPermissionAdmin = null; bundle1DoAction = null; cleanAllFactories(); } /** * There are some factories that can only be set once. Since in the unit tests, * we constantly start and stop the framework, and the framework sets these factories, * we need to clean up. */ public void cleanAllFactories() throws Exception { Field urlFactory = URL.class.getDeclaredField("factory"); urlFactory.setAccessible(true); urlFactory.set(null,null); Field urlConnectionFactory = URLConnection.class.getDeclaredField("factory"); urlConnectionFactory.setAccessible(true); urlConnectionFactory.set(null,null); } }
tests/org.osgi.impl.service.policy.unittest/src/org/osgi/impl/service/policy/integrationtests/IntegratedTest.java
/* * ============================================================================ * (c) Copyright 2005 Nokia * This material, including documentation and any related computer programs, * is protected by copyright controlled by Nokia and its licensors. * All rights are reserved. * * These materials have been contributed to the Open Services Gateway * Initiative (OSGi)as "MEMBER LICENSED MATERIALS" as defined in, and subject * to the terms of, the OSGi Member Agreement specifically including, but not * limited to, the license rights and warranty disclaimers as set forth in * Sections 3.2 and 12.1 thereof, and the applicable Statement of Work. * All company, brand and product names contained within this document may be * trademarks that are the sole property of the respective owners. * The above notice must be included on all copies of this document. * ============================================================================ */ package org.osgi.impl.service.policy.integrationtests; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.net.URL; import java.net.URLConnection; import java.security.AllPermission; import java.security.CodeSource; import java.security.PermissionCollection; import java.security.Permissions; import java.security.Policy; import java.security.PrivilegedExceptionAction; import java.security.Security; import junit.framework.TestCase; import org.eclipse.osgi.framework.internal.core.FrameworkSecurityManager; import org.eclipse.osgi.framework.internal.core.OSGi; import org.eclipse.osgi.framework.internal.defaultadaptor.DefaultAdaptor; import org.osgi.framework.Bundle; import org.osgi.framework.BundleContext; import org.osgi.framework.PackagePermission; import org.osgi.framework.ServiceReference; import org.osgi.service.condpermadmin.ConditionalPermissionAdmin; import org.osgi.service.permissionadmin.PermissionAdmin; import org.osgi.service.permissionadmin.PermissionInfo; /** * * TODO Add Javadoc comment for this type. 
* * @version $Revision$ */ public abstract class IntegratedTest extends TestCase { public static final String ORG_OSGI_IMPL_SERVICE_POLICY_JAR = "file:../../org.osgi.impl.service.policy/org.osgi.impl.service.policy.jar"; public static final String ORG_OSGI_IMPL_SERVICE_DMT_JAR = "file:../../org.osgi.impl.service.dmt/org.osgi.impl.service.dmt.jar"; public static final String ORG_OSGI_IMPL_SERVICE_LOG_JAR = "file:../../org.osgi.impl.service.log/org.osgi.impl.service.log.jar"; public static final String ORG_OSGI_IMPL_SERVICE_CM_JAR = "file:../../org.osgi.impl.service.cm/org.osgi.impl.service.cm.jar"; public static final String ORG_OSGI_IMPL_SERVICE_EVENT_MAPPER_JAR = "file:../../org.osgi.impl.service.event/org.osgi.impl.service.event.mapper.jar"; public static final String ORG_OSGI_IMPL_SERVICE_EVENT_JAR = "file:../../org.osgi.impl.service.event/org.osgi.impl.service.event.jar"; public static final String ORG_OSGI_IMPL_SERVICE_POLICY_USERPROMPT_JAR = "file:../../org.osgi.impl.service.policy/org.osgi.impl.service.policy.userprompt.jar"; public static final String INTEGRATIONTESTS_BUNDLE1_JAR = "file:../integrationtests.bundle1.jar"; public static final String INTEGRATIONTESTS_MESSAGES_JAR = "file:../integrationtests.messages.jar"; public FrameworkSecurityManager secMan; public DefaultAdaptor adaptor; public BundleContext systemBundleContext; public Bundle osgiAPIsBundle; public Bundle eventBundle; public Bundle eventMapperBundle; public Bundle configManagerBundle; public Bundle logBundle; public Bundle dmtBundle; public Bundle policyBundle; public Bundle userPromptBundle; public Bundle integrationTestBundle; public Bundle integrationTestMessagesBundle; public OSGi framework; public PermissionAdmin permissionAdmin; public ConditionalPermissionAdmin conditionalPermissionAdmin; public Method bundle1DoAction; /** * This policy implementation gives AllPermission to all code sources. * Hopefully the framework will overide this for the bundle code sources. 
*/ public static class VeryGenerousPolicy extends Policy { public void refresh() {} public PermissionCollection getPermissions(CodeSource codesource) { //System.out.println(codesource.getLocation()); Permissions pc = new Permissions(); pc.add(new AllPermission()); return pc; } } public void tearDown() throws Exception { stopFramework(); } public void setBundleAsAdministrator(String location) throws Exception { permissionAdmin.setPermissions(location,new PermissionInfo[] { new PermissionInfo(AllPermission.class.getName(),"*","*")}); } public void startFramework(boolean fresh) throws Exception { cleanAllFactories(); Policy.setPolicy(new VeryGenerousPolicy()); // replace policy file ${user.home}/.java.policy with our own Security.setProperty("policy.url.2","file:policy"); secMan = new FrameworkSecurityManager(); System.setSecurityManager(secMan); adaptor = new DefaultAdaptor(fresh?new String[] { "reset" }:null); framework = new OSGi(adaptor); framework.launch(); systemBundleContext = framework.getBundleContext(); ServiceReference sr = systemBundleContext.getServiceReference(PermissionAdmin.class.getName()); permissionAdmin = (PermissionAdmin) systemBundleContext.getService(sr); sr = systemBundleContext.getServiceReference(ConditionalPermissionAdmin.class.getName()); conditionalPermissionAdmin = (ConditionalPermissionAdmin) systemBundleContext.getService(sr); if (fresh) { // Warning! Don't do this on a real system! 
permissionAdmin.setDefaultPermissions(new PermissionInfo[] { new PermissionInfo(PackagePermission.class.getName(),"*","IMPORT") }); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_EVENT_JAR); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_EVENT_MAPPER_JAR); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_CM_JAR); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_LOG_JAR); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_DMT_JAR); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_POLICY_JAR); setBundleAsAdministrator(ORG_OSGI_IMPL_SERVICE_POLICY_USERPROMPT_JAR); permissionAdmin.setPermissions(INTEGRATIONTESTS_MESSAGES_JAR, new PermissionInfo[]{ new PermissionInfo(PackagePermission.class.getName(),"*","EXPORT") } ); } eventBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_EVENT_JAR); eventMapperBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_EVENT_MAPPER_JAR); configManagerBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_CM_JAR); logBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_LOG_JAR); dmtBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_DMT_JAR); policyBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_POLICY_JAR); userPromptBundle = systemBundleContext.installBundle(ORG_OSGI_IMPL_SERVICE_POLICY_USERPROMPT_JAR); integrationTestBundle = systemBundleContext.installBundle(INTEGRATIONTESTS_BUNDLE1_JAR); integrationTestMessagesBundle = systemBundleContext.installBundle(INTEGRATIONTESTS_MESSAGES_JAR); eventBundle.start(); eventMapperBundle.start(); configManagerBundle.start(); logBundle.start(); dmtBundle.start(); policyBundle.start(); userPromptBundle.start(); integrationTestMessagesBundle.start(); Class cl = integrationTestBundle.loadClass("org.osgi.impl.service.policy.integrationtests.bundle1.Test"); bundle1DoAction = cl.getDeclaredMethod("doAction",new Class[]{PrivilegedExceptionAction.class}); } public void stopFramework() throws Exception { if (framework!=null && 
framework.isActive()) framework.shutdown(); framework = null; System.setSecurityManager(null); Policy.setPolicy(null); secMan = null; adaptor = null; systemBundleContext = null; osgiAPIsBundle = null; eventBundle = null; eventMapperBundle = null; configManagerBundle = null; logBundle = null; dmtBundle = null; policyBundle = null; integrationTestBundle = null; permissionAdmin = null; conditionalPermissionAdmin = null; bundle1DoAction = null; cleanAllFactories(); } /** * There are some factories that can only be set once. Since in the unit tests, * we constantly start and stop the framework, and the framework sets these factories, * we need to clean up. */ public void cleanAllFactories() throws Exception { Field urlFactory = URL.class.getDeclaredField("factory"); urlFactory.setAccessible(true); urlFactory.set(null,null); Field urlConnectionFactory = URLConnection.class.getDeclaredField("factory"); urlConnectionFactory.setAccessible(true); urlConnectionFactory.set(null,null); } }
support for test bundle 2
tests/org.osgi.impl.service.policy.unittest/src/org/osgi/impl/service/policy/integrationtests/IntegratedTest.java
support for test bundle 2
Java
apache-2.0
825a062f7ee6b405232875d22c3454c5e8e107fa
0
bitstorm/wicket,topicusonderwijs/wicket,freiheit-com/wicket,apache/wicket,selckin/wicket,mafulafunk/wicket,zwsong/wicket,apache/wicket,klopfdreh/wicket,apache/wicket,topicusonderwijs/wicket,dashorst/wicket,freiheit-com/wicket,selckin/wicket,mosoft521/wicket,bitstorm/wicket,dashorst/wicket,astrapi69/wicket,aldaris/wicket,selckin/wicket,AlienQueen/wicket,bitstorm/wicket,aldaris/wicket,AlienQueen/wicket,klopfdreh/wicket,selckin/wicket,freiheit-com/wicket,mosoft521/wicket,mosoft521/wicket,mosoft521/wicket,mafulafunk/wicket,mafulafunk/wicket,aldaris/wicket,topicusonderwijs/wicket,dashorst/wicket,freiheit-com/wicket,aldaris/wicket,apache/wicket,topicusonderwijs/wicket,astrapi69/wicket,topicusonderwijs/wicket,AlienQueen/wicket,aldaris/wicket,freiheit-com/wicket,apache/wicket,astrapi69/wicket,zwsong/wicket,klopfdreh/wicket,mosoft521/wicket,dashorst/wicket,bitstorm/wicket,zwsong/wicket,selckin/wicket,dashorst/wicket,bitstorm/wicket,klopfdreh/wicket,AlienQueen/wicket,astrapi69/wicket,klopfdreh/wicket,AlienQueen/wicket,zwsong/wicket
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.extensions.ajax.markup.html.autocomplete; import java.util.Iterator; import org.apache.wicket.markup.ComponentTag; import org.apache.wicket.markup.html.form.TextField; import org.apache.wicket.model.IModel; /** * An implementation of a textfield with the autoassist ajax behavior {@link AutoCompleteBehavior}. * * Note that you must add your own CSS to make the suggestion display properly, see * {@link DefaultCssAutoCompleteTextField} for an example. * * @see DefaultCssAutoCompleteTextField * @see AutoCompleteBehavior * @see IAutoCompleteRenderer * * @since 1.2 * * @author Igor Vaynberg (ivaynberg) * * @param <T> * The model object type */ public abstract class AutoCompleteTextField<T> extends TextField<T> { private static final long serialVersionUID = 1L; /** auto complete behavior attached to this textfield */ private AutoCompleteBehavior<T> behavior; /** renderer */ private final IAutoCompleteRenderer<T> renderer; /** settings */ private final AutoCompleteSettings settings; /** * Constructor for the given type with default settings. 
* * @param id * component id * @param type * model objec type */ public AutoCompleteTextField(final String id, final Class<T> type) { this(id, null, type, new AutoCompleteSettings()); } /** * Constructor for the given model and type. * * @param id * component id * @param model * model * @param type * model object type * @param settings * settings for autocomplete */ @SuppressWarnings("unchecked") public AutoCompleteTextField(final String id, final IModel<T> model, final Class<T> type, final AutoCompleteSettings settings) { this(id, model, type, StringAutoCompleteRenderer.INSTANCE, settings); } /** * Constructor for given model. * * @param id * component id * @param model * model * @param settings * settings for autocomplete */ public AutoCompleteTextField(final String id, final IModel<T> model, final AutoCompleteSettings settings) { this(id, model, null, settings); } /** * Constructor for the given model. * * @param id * component id * @param model * model */ public AutoCompleteTextField(final String id, final IModel<T> model) { this(id, model, null, new AutoCompleteSettings()); } /** * Constructor. * * @param id * component id * @param settings * settings for autocomplete */ public AutoCompleteTextField(final String id, final AutoCompleteSettings settings) { this(id, null, settings); } /** * Constructor. * * @param id * component id */ public AutoCompleteTextField(final String id) { this(id, null, new AutoCompleteSettings()); } /** * Constructor using the given renderer. 
* * @param id * component id * @param renderer * renderer for autocomplete */ public AutoCompleteTextField(final String id, final IAutoCompleteRenderer<T> renderer) { this(id, (IModel<T>)null, renderer); } /** * Constructor for the given type using the given renderer * * @param id * component id * @param type * model object type * @param renderer * renderer for autocomplete */ public AutoCompleteTextField(final String id, final Class<T> type, final IAutoCompleteRenderer<T> renderer) { this(id, null, type, renderer, new AutoCompleteSettings()); } /** * Constructor for the given model using the given renderer. * * @param id * component id * @param model * model * @param renderer * renderer for autocomplete */ public AutoCompleteTextField(final String id, final IModel<T> model, final IAutoCompleteRenderer<T> renderer) { this(id, model, null, renderer, new AutoCompleteSettings()); } /** * Constructor for the given model using the given renderer. * * @param id * component id * @param model * model * @param type * model object type * @param renderer * renderer for autocomplete * @param settings * settings for autocomplete */ public AutoCompleteTextField(final String id, final IModel<T> model, final Class<T> type, final IAutoCompleteRenderer<T> renderer, final AutoCompleteSettings settings) { super(id, model, type); this.renderer = renderer; this.settings = settings; } /** * Factory method for autocomplete behavior that will be added to this textfield * * @param renderer * auto complete renderer * @param settings * auto complete settings * @return auto complete behavior */ protected AutoCompleteBehavior<T> newAutoCompleteBehavior( final IAutoCompleteRenderer<T> renderer, final AutoCompleteSettings settings) { return new AutoCompleteBehavior<T>(renderer, settings) { private static final long serialVersionUID = 1L; @Override protected Iterator<T> getChoices(final String input) { return AutoCompleteTextField.this.getChoices(input); } }; } /** {@inheritDoc} */ @Override 
protected void onBeforeRender()
{
	// add auto complete behavior to this component if its not already there
	if (behavior == null)
	{
		// we do this here instead of constructor so we can have an overridable factory method
		add(behavior = newAutoCompleteBehavior(renderer, settings));
	}
	super.onBeforeRender();
}

@Override
protected void onComponentTag(final ComponentTag tag)
{
	super.onComponentTag(tag);

	// disable browser's autocomplete
	tag.put("autocomplete", "off");
}

/**
 * Callback method that should return an iterator over all possible assist choice objects. These
 * objects will be passed to the renderer to generate output. Usually it is enough to return an
 * iterator over strings.
 * 
 * @see AutoCompleteBehavior#getChoices(String)
 * 
 * @param input
 *            current input
 * @return iterator over all possible choice objects
 */
protected abstract Iterator<T> getChoices(String input);

/**
 * @return The {@link IAutoCompleteRenderer} used to generate html output for the
 *         {@link AutoCompleteBehavior}.
 */
public final IAutoCompleteRenderer<T> getChoiceRenderer()
{
	return renderer;
}
}
wicket-extensions/src/main/java/org/apache/wicket/extensions/ajax/markup/html/autocomplete/AutoCompleteTextField.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.extensions.ajax.markup.html.autocomplete;

import java.util.Iterator;

import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.model.IModel;

/**
 * An implementation of a textfield with the autoassist ajax behavior {@link AutoCompleteBehavior}.
 * 
 * Note that you must add your own CSS to make the suggestion display properly, see
 * {@link DefaultCssAutoCompleteTextField} for an example.
 * 
 * @see DefaultCssAutoCompleteTextField
 * @see AutoCompleteBehavior
 * @see IAutoCompleteRenderer
 * 
 * @since 1.2
 * 
 * @author Igor Vaynberg (ivaynberg)
 * 
 * @param <T>
 *            The model object type
 */
public abstract class AutoCompleteTextField<T> extends TextField<T>
{
	private static final long serialVersionUID = 1L;

	/** auto complete behavior attached to this textfield */
	private AutoCompleteBehavior<T> behavior;

	/** renderer */
	private final IAutoCompleteRenderer<T> renderer;

	/** settings */
	private final AutoCompleteSettings settings;

	/**
	 * Constructor for the given type with default settings.
	 *
	 * @param id
	 *            component id
	 * @param type
	 *            model object type
	 */
	public AutoCompleteTextField(final String id, final Class<T> type)
	{
		this(id, null, type, new AutoCompleteSettings());
	}

	/**
	 * Constructor for the given model and type.
	 * 
	 * @param id
	 *            component id
	 * @param model
	 *            model
	 * @param type
	 *            model object type
	 * @param settings
	 *            settings for autocomplete
	 */
	// NOTE(review): the unchecked warning presumably stems from
	// StringAutoCompleteRenderer.INSTANCE not being parameterized with T -- confirm
	@SuppressWarnings("unchecked")
	public AutoCompleteTextField(final String id, final IModel<T> model, final Class<T> type,
		final AutoCompleteSettings settings)
	{
		// delegate to the main constructor, supplying the default String-based renderer
		this(id, model, type, StringAutoCompleteRenderer.INSTANCE, settings);
	}

	/**
	 * Constructor for given model.
	 * 
	 * @param id
	 *            component id
	 * @param model
	 *            model
	 * @param settings
	 *            settings for autocomplete
	 */
	public AutoCompleteTextField(final String id, final IModel<T> model,
		final AutoCompleteSettings settings)
	{
		this(id, model, null, settings);
	}

	/**
	 * Constructor for the given model.
	 * 
	 * @param id
	 *            component id
	 * @param model
	 *            model
	 */
	public AutoCompleteTextField(final String id, final IModel<T> model)
	{
		this(id, model, null, new AutoCompleteSettings());
	}

	/**
	 * Constructor.
	 * 
	 * @param id
	 *            component id
	 * @param settings
	 *            settings for autocomplete
	 */
	public AutoCompleteTextField(final String id, final AutoCompleteSettings settings)
	{
		this(id, null, settings);
	}

	/**
	 * Constructor.
	 * 
	 * @param id
	 *            component id
	 */
	public AutoCompleteTextField(final String id)
	{
		this(id, null, new AutoCompleteSettings());
	}

	/**
	 * Constructor using the given renderer.
	 *
	 * @param id
	 *            component id
	 * @param renderer
	 *            renderer for autocomplete
	 */
	public AutoCompleteTextField(final String id, final IAutoCompleteRenderer<T> renderer)
	{
		// the explicit cast selects the IModel overload of this(...) rather than the Class one
		this(id, (IModel<T>)null, renderer);
	}

	/**
	 * Constructor for the given type using the given renderer.
	 * 
	 * @param id
	 *            component id
	 * @param type
	 *            model object type
	 * @param renderer
	 *            renderer for autocomplete
	 */
	public AutoCompleteTextField(final String id, final Class<T> type,
		final IAutoCompleteRenderer<T> renderer)
	{
		this(id, null, type, renderer, new AutoCompleteSettings());
	}

	/**
	 * Constructor for the given model using the given renderer.
	 * 
	 * @param id
	 *            component id
	 * @param model
	 *            model
	 * @param renderer
	 *            renderer for autocomplete
	 */
	public AutoCompleteTextField(final String id, final IModel<T> model,
		final IAutoCompleteRenderer<T> renderer)
	{
		this(id, model, null, renderer, new AutoCompleteSettings());
	}

	/**
	 * Constructor for the given model using the given renderer. This is the main constructor to
	 * which all other constructors delegate.
	 * 
	 * @param id
	 *            component id
	 * @param model
	 *            model
	 * @param type
	 *            model object type
	 * @param renderer
	 *            renderer for autocomplete
	 * @param settings
	 *            settings for autocomplete
	 */
	public AutoCompleteTextField(final String id, final IModel<T> model, final Class<T> type,
		final IAutoCompleteRenderer<T> renderer, final AutoCompleteSettings settings)
	{
		super(id, model, type);
		this.renderer = renderer;
		this.settings = settings;
	}

	/**
	 * Factory method for autocomplete behavior that will be added to this textfield
	 * 
	 * @param renderer
	 *            auto complete renderer
	 * @param settings
	 *            auto complete settings
	 * @return auto complete behavior
	 */
	protected AutoCompleteBehavior<T> newAutoCompleteBehavior(
		final IAutoCompleteRenderer<T> renderer, final AutoCompleteSettings settings)
	{
		return new AutoCompleteBehavior<T>(renderer, settings)
		{
			private static final long serialVersionUID = 1L;

			@Override
			protected Iterator<T> getChoices(final String input)
			{
				// delegate to the enclosing textfield's abstract callback
				return AutoCompleteTextField.this.getChoices(input);
			}
		};
	}

	/** {@inheritDoc} */
	@Override
	protected void onBeforeRender()
	{
		// add auto complete behavior to this component if its not already there
		if (behavior == null)
		{
			// we do this here instead of constructor so we can have an overridable factory method
			add(behavior = newAutoCompleteBehavior(renderer, settings));
		}
		super.onBeforeRender();
	}

	@Override
	protected void onComponentTag(final ComponentTag tag)
	{
		super.onComponentTag(tag);

		// disable browser's autocomplete
		tag.put("autocomplete", "off");
	}

	/**
	 * Callback method that should return an iterator over all possible assist choice objects. These
	 * objects will be passed to the renderer to generate output. Usually it is enough to return an
	 * iterator over strings.
	 * 
	 * @see AutoCompleteBehavior#getChoices(String)
	 * 
	 * @param input
	 *            current input
	 * @return iterator over all possible choice objects
	 */
	protected abstract Iterator<T> getChoices(String input);
}
WICKET-4586 add getter for renderer in autocompletetextfield
wicket-extensions/src/main/java/org/apache/wicket/extensions/ajax/markup/html/autocomplete/AutoCompleteTextField.java
WICKET-4586 add getter for renderer in autocompletetextfield